3Ŧ[A2`g.l\wo~|^56W-M&%@#\iZ.)w2ϑ$S7PP+DrgcV0- u qߛ8 p4ۄ\)-݃.%_GAr"R',"h A+ՆH4FA XO0US=Z1zTg\E65ڰ^E{Yfmzz=a`{ ْtDTOoѱ M ւ+P9OFʸE$7.7Κ@E8i28:,N$LD%'ԗFDA:tNpŬb^e-ĕDQ2Кx<=]LoM0aFMn&SwiCcҫAo:1NedL~c aU~$VF &k`.JKX4"T 1JfKuox֓`amvy Nr2 D͊JOl1a޴Ӯ⧍H:ދ+-7;;Y-x4Bbhraj 5FcRS9Q7p٤V ǹPNH*UAi֨stgQEB7xv_7[:)RC\2d5\ C2= !xƘ"H;ӋhY:qӫіܯ5/lkźqC½U\h疡 ܭUyj;~DXNDR?`$"Kb|d[,~V"Fy[A/)QRYFmVyk!RXNPA&t2QK iMJF.v] {ABBTc8AOFa5x)-t=|x0̗phsWiLDvYVX(>wdE}:0u28I69᷷풺`x=1#BV¥psCZ ~ʺ /ʑ6k۷EͿq˶ڜWŶϭ\"hʫ¶3 0U:i'AOn?HpϥI)=6-#K4ػ4N4(ac:mk m ߶+D[ze"6s \!#B)!6 eg)leo35iPCڣPn,_ U t~"!ezMc\[C;=gZhFvUV8v#iG<0#~[+IMZwJpIZXHtNkN2R&^T#J;T* ]R.Ir:nAYB4:4M0(KH:A.eISv)ڞ*Ԇ'@كu*':ŤVΙ? <4ImƝ S0B (@QZKHk%{ JbT<5b-;ZCa7?jo2 qTdMj y< ɭd"o ̨Jbv B 7=M3E-]Ox ɕe"vHEEh}F7>xCˬ8:Gs, v7d;*U{|py%0W+߮Q-~=/\V᠒RʪkH\~9t5u_)Mn۷r`5};DjuȢ]RWiyokS0pƢ A* AX K=lRo-cnK1Ϭޫ-@bꅷ\+ۆ Q`ٻ6W lu .>bvAJv,'̯_H9QjYͮ&>vպڕCܜSK/uڍTziObMŧTPBo9C@dk9Tm'ۓYlGSvWXpI KŮMX&0ʫÏOQ۝xEJf,^9D MAuaj8v'Ӯ-xCJX)]#Ϲq,AcSJ22[Fd<21g -H6j'={kQ~-fX87Q.9Dܔ5i' B1ԫZWDQ!e̽ bRvG\ `m]Nf|/N[a\.+W ǘͿ.ƶ)lajp\Kҥ@tNF_[dDf͇v1E1v2|-m~Xy}7"tmoAtwJ:?z?{mȘgn[:: 'H*I3Qqe7n7 ww _ _ _ TS׹ї^sZ)ښ H\|DnC@gʺ/XJG!J鄱SFā cԗcNK1S̜t> mB|gZ{I^e[̭ 5`GTzQD}|2lǦ^}-"o͖mf90Q e}x ڕr8S>#2e]p,8iޱџ!^C5XH)⃴RbT#%ӽc[/%'ۮlD.l} 9<;kgwoK=ݮE{ zߋeL;i;Ww#l{ka%]kܟ5n C|ږQE hTfCa!%aMwJjvzmߞOeG[Ol"E:VK 44MsǪЫv5E1.{ ?O?SgI1-:)kSw S\`ru~f8(1Z"z:xմ:AQb |0SphdTlNsSz~6|O^~SOWM^IQyWnCҽ&GB)}޵PH|9]Z R\hRd9:3t1=:,9 Vީ&Fȕq2sQ4W 'ba   t\8 $be.Wzttp~#v 0I *buSD>;pRCzi% &bY*v\lث́g6lcH%WZ n=i$涠dTNvڡ>S* FV̛9Y%k(#{C踖9xCLUNFdX$Q8F[ IeMNj[c"bw:QGu$ 'Fw!a)%/XCgLw,b"Sc1o\fpcɤQ#&YĔkBwbD̜͈ڭfUwmuN%OEbxW*T~֚#:Tc\Klԁ.`mɼxb3@؄-_cCvbɹPj11S#eQk⌜,{RD4;Nf߮OߏԦ Nˡiqm@ٸ\PblI4mc5ZQL+w[8}w&$c#Y\SpcGG١ILC2hy7wf+&.EuvڑI٥>RυF6E}`psk0kqْ\.k\v}Vc TRqqX hӽ'5'o#jw]:]7Ԏ^FC ]-p֌>l&qUtfЀw4m =%!TB KTFQS96H=8+(`N ckYXt!<:n}ÑYܧ2,2 不ty۽4|$HܤMz_מ98]۽tO޿"CqoS,geeQzq|L#|rY\}5`XuiWn.픗."5.CyK\^}lA7?eZ%v8;; ֮j2U> U|c}ul97XL|ƒ뿚W$Nf֣ 97oߌBMMV FLe`xddiɽvZg.E Q||e~a屫9:&Î.e: e s!e]mn_۾@o =X8Ʒ7Nk0 ~x0<.'~o<Gԥ_?n_۷ALPCfse*4o4yHʂWGvٗ\vt9׿41{?V$Gn38Oᅗ.3\o~r˃Zμ_ƀ}4A&ql͹Т.v L-ڃv,Y`7L10ޱEWlg?I]–JCiNƞ>]WGjNT#U0REO`ɥ$i%]y\/>mR|)K-n6:f7l-9c஖-0> npfnY[ άe g/קr٧?L!2<#23"Z +-}f-RKo/Y d)WԪ،x^Ĵ*.D%1:IjvM&~,֦6c9g4Q.jHxgfĪn2s6:ZMO.Z؁U]qO?,58/53K/AiIrORj1rV9:9[]NbG]}Wȗir#;0xݗ۽W]:1oJB19GKXP1T3}>Q5?=(2Xu&Z=v;;VbM~3M:]y=/Nom)Mnv(D5׃< E K4#|i - Q\Df=H,66xC4.\+R} <͙Q_$mk.[2rdG|rTxr޼5p#n! 
X?7b,j7ڢ*zϩKRv1w˨L0`0 z"hzqvTE^2䔫X1X6/gC5LEz9$TGZpD`Nѵ*U{  @m*6t_2 zAPr `¦QK`6D-cRZ۴nIX;g?Z-&-K%LiVkx ZƐt s^OEnh^qٍl``ґEl q6;y{U4 SS]cy(ld;j&QZ|2XJls v{sƢC5naxRZ8#=/I J-3_ cCi'2v-ކ=J'#k[lu%.wTZe₲NSZi,dҴ>;f5P,9MiXie =4^]y*!VՀ$f ́LPZ4Xlm¥LiQh eblL+acr(_Ϧ°S]de^K3&d;"c#{3ݕUXL1Aη!0)F$AT3ENN=q 6,ݱVf 5bp$N`6L;kmO-0mZ&iHEl@YH"iClr{gνι ?%Rm*r*ʦ[ciT66% j6?|c Vv{h*3jzy G4GRvz(U'T5VJx5=*(I}^R6Ԉ &1bqѳT+D#5"j8(c e0>j2a+"Ǜ ~\uXgLӄ@<-np u^|[1#9I`BTRv`N!&x?6ô0Tb3J7#s[ Nz- ] &W$ZʍUz| ANKj,(t_@JPI"&̦Wȼ"a>X\8Lt~bƍmHm( AJt+Sx$[L-h`uvT$ 8U+ɉ᫘w%,ܶMxYI N;XoݷOe-n{c9TX] M("f_[b =N x@6D7K x1* 7Db6y.e}tbQV]k)Zc2HcBQ\;J&Vׇ3yy+?rrI'#&cUmQ1zF,pZnkT8t-E \5vi7l#: gh ` e@Ao 2Zdpb,p0",0wϢb!t",U@k6֜cy[!gѝa,X#4Ѐ7T*[fq/z+Fc-B@̤H/4$oYBeڋ!K-hLl8Hc[NatwFBi[1e;WK"k.nj!f=0pSm[)w`)jY:Z 5$޵繨)UFCon0:io.6|Tk>spx/7C|C HJm(X՗|lQ㻪f- ^p빊& @Lsj;ڛbZ\V\z1f%zŴ3ӐC/>k}lٖXgFv2݅& :/Ll2dQIK:j'm)%i;X#Us֮2jT<[hܱ.y .qep qxv~a0fvnk;M?`b~J)$$MܘʪiIT (-khc\[oY 7qE; C7 ͜gZn:d/ZvZzN1?~3)cp;)12fTƌʘQ3*cFę12fTƌʘQ3*cFę12fTƌʘQ3*cFę12fTƌʘQ3*cFę12fTƌʘQ3*cFę12fTƌʘQg[̵bO5&`0 ݛ0齩1kjL<_*q#%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R*Dd'%jH{@0K ky%4@Q q@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJV|@0Q\F dr+`%'%гTY<)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@H tck=mۃ'mikun6N|A^N.Bl> #roKV.ʕcH zor RMj*/Yw>\5+p݄+}ǡ/|zY O- e֫ WR0[J#\i Wzv \Mj /Yfa?M= 6o>৕~Yqٰt(:~+&ῃ׿b~iFo`p̼MeJx1anot3W} Zv=L7+ 0 ôIY?0qi{-5#5-q`ǃK||3x*w8>KG%pw~7#W`/sdIjS p0{|v uyxq5xw:_9V_maWI)c7vA9 #l0eVɈɪYJJAk51*{`0loU3W} WZmw=\5+-zeOڰV`?'dz#p>kn/{_"G?8!le7ä[vײ`!U'RԦS"[#:5|TItx&쐿K6 ;׶T)( 5ݛ,}y_~J4e瘅|5K KN?:r=>kMNhږM+4&tvlI Y?| V}r\t>T>_tfG5[HQ+\nZgm7r?>W˯nKBkrئ|OwInō9]cZm}X}Ol46}Ү?\iNOERpy„R1:UCߊt2}mzog < o^~Ms'sn/~ ͂f:o痷M70=<-ݞZ_@9,V\k#ZjW{4vhHio^|zJ.^o{H%t7yUd1u36t R9M4mdcR5sET՘YjEАC!jT+'( [{i2yY0w8tmz_d`0R0xonöhЛ[Ôx7;{Խރߴ(nt~'^58x&bLJ5:glnWb~9Ʋ˓{A#x]ط~1e|WU,&d ­Y+|Z"^ |^Ha_H.TGňc#fw- x>[]UѐW!^ƽ¼|Uv|7ýπWVrN/s{׸S>sy||")jtyR@u`'2)zNSD$M%ʪ%LRVpDiY[XW9%5v"reԛr3xFs ^y|>^ r/b[N3-1K~Yw:~5BHW^.AI wxuki񫷛μ_>ƱZha,q 7ơk& ַOh?+spK&-hQY>rp2m9 |<* \RGdܶ}oL~>6''ӏ_xנn[Qy./~ 馶/7J?ONDžK@6[v$kDn&[BZOfm"}뮎ڙIz?ƎwY2Ʈ;|4s)DbGoXvb$^*xUVMYU v>ZlFp+wrDqqֈK}sRwiqO>lq 7Zc}}>Og/M'`xGZ0|f ^}"2+w~5h@3j@5ÛoN> 4_ 2FVirR .tu*ڨBv|5}R^_͏]X7?]\]? B7ү֏H|ToN`R[܅ ]0AGWP9=!Zx~QrC95!+SD="4_ܘ' 1ۘ㾨K^'Q$3 Ɔ̲ kGVXTeͷϷ>$ȽIo6mG H>>U[9Hn؁+f(3"| x{x\6bȥ-E޵6n$BM.n`I #K^Qάp}IJdӒeИluUU,Eg4YfH)\BpFJ xݴ|^},.{ Jqg(=THUf|aW=5BXG\N &(m>+!8j2nRYPӱX훎5ԇ-mD=2۽4 ĆyAdOF[BWp+rZDPou-͘ꦚelv$4mh^EMT޽=0 ɹ<@Qk;6T[\os']0hgI QG2fLPj;{/+m ochMLUwM걻y` Xwh׽bg"h }=6HpӇ7]P|& Q Z^R -gTPN3,Hgzud W鸬O@>z4Ϳs%C;jhژT,zi3AUT' 1h KF zC(+__=Quuqk.nWԝ˫lXa=6 pן44LU%LCz'MҜ{j{mK~*C&W_NE>M+\Ng7O#XЃở'zO#Wgߟ^}4j<,'QJ9㺨)S#ey[YeD8:7೯<~Pgㅬ(P zp${ж:dAwfi&y9-_-x7Z6ΊyhMgsh`wv MF?*Osm9F[f[TLhbd4 Oy`ɸ@#.~E !ooC }$P:_5מ?gnmM.@ Xl&Կ9/31Cغ9s*J TP /_ %Ϯ}(I6jRԗ+'Z2P}L89(IGa`#=>J=罥Behu.=fB9wbjMo y[w=sx~:Iۙ~myo-|~tma/>S. [d2ztkXnmBIj6_ ^l™jK*<8YF$Y_v[ꇴc 9n;v/j>pv^t;U 4Tg/*ъfF&"@* sq㝇H ̧Di1ؠb#. 
ZS4j)c$rqqs.AE`j|ԖhI=ֽiGd]GidJRp_~Ҹ})cet.ٛRg(h2f=ʊ0Tkivl˚í_uPu.UU ,< u‰dMiƋ TRQ =({z*ӄ 4!ljug\C[[ғDF 42yLQg4G+5A4u3 ?ִB .QE-SpSqre9ZUJ2t`D⇅B'q5|"DefO ^x"^ TI.$5: '2)wN]%;V< %M_-uUv&UWETVIvܺM;cu5KM[?.v?=1>PZY)(!Mbne6z".?=G%\MkW'khX ow`wGide\ &2|eT]_Dϋߚ3%ϐ9r?®Iଈ#7.쌾?1c/9 dM/NDd, 5heaP\Z'WB+QK?9]ؖO:(;RƲw@BU%m~r%W|In7-+r&)z|O]ej|x9[Eݢ~狿VYA.;jCm꣱DMhYq~9PYz6;xCW0MtWȲ9pF5|G52/=y6RO5/+N7EB;7Wȗ*nC6dT1@ze!K(Y DbDYCR4ZLLIej6fB)c2!}sY 80A3%\*R%3Qq-ZOL@s/磂'ɧ2G(}!G_!C % 0@@˶ӎ}(8+Cdn҈ߨgACplC'r`PAE]&P[IND\~ E$꘸^Z01)l F'( Dsͼ Ezai` 4tzb, 2ڳ9ەyuU8'NS }%fH%d :q{՜S s J9bt^Q5Kɹ3!|]hh֛@(2Ovݣn$*qRI.dg7 k@raF#2.HVG{N*^Wp0SiK ,AyE)XAGר'tqaAȨ7C( O7hW= x3hp;H]sh@U:|Y؊Dt"XU2cx)w$w4 Z>Sgs!Fφ8k@T6R!^EliYh`]B\ -RR :R>#]$J")T#zjAy?J.9թM)tLHt?|ї<lW/2{&8>/CH?2!H0_=2!\j>2!ZnLHRL'X&Đx0K a]G7fJ#)iˋN]@h TRbK/-3ov'c}ؽ>҇@ -eNj QG!}>\)Q)FXެ58c?>${g6|?jA~vXi[a,LDFp%a|=v ЊQ)KA*$RAՠ[7g|EKGƤ<'(*%G%@!&$|Gnw>C O_>%qReE׬QmM/ IPb3q:i# =﷚ ڲ @dd8.i-cf\"xĄYE | RmCnfo|OnF='fU_:|U*oyPu&# +ţBiYVn6R,*"V*)=oeLL2=L0Rd=II8 H(+CRoJo7WedAeZIu֌O'c m'iPx~<Ķ-LX"ɘ7 z "e *P &Z>8!89ÓiƞpQ6dɕJx%f6ꄶ16ª Mqr}]b$"{}YjAjzh&TV@nO+޵#EȧݻiE_Afbއ`GE<'_%ٲǒeeKNR]lVXUe$ X@ 8m )͐-d E"XI$#kaőj?b.: ֤:y[&\M.2U`<=+]LjzDq[ $1-L>G: x`lZkUQvށ"[Q/ƂedC2d*r)%-Ѫ`S p;#iDHԑqql_xHθdG\Tc\=.))J@/`yߤDeutZN$2u!o{#Xz\. v!tjl=@X=M:G?N(Q^ +5=/kUj= k)] [L([WE*Oף|«<7ۇ8v7 uo*CJoqCHSzRg8Gr0˟jN/|.}|j>rF Kル\n"D&K_PRKr/V:MVm6.)mQRD2y #$ IXeP|$)Bn:sI` O E.0gl&][uE'mw8iI vwyCÏS1/Z1\W*؟%m:K2JiLMmWh*ISoWleNmJ{T* U輻̱ 1*JH Z7Iu#Av53x'IUݞ&aMЊ&T4q'Z?1!o2(TB&!G*X4c JO3g'Elp(-`S^6ZmCwtGFҦ5^!\jmMO J5Iן _ο^̯<N$A$a֢I>dWG2?(-Ӂ٢zZK_ ]ٵJEf^rvؠ׷nަڭzfv+.s^t6|j5nr㝾'f'Od\ˣRnQ;XKE9S}Sã|Z# Ƈ(Mx})]vDKdrrH L2\ % sJdrBbRػN`_tl(ov ](R9Rm2 sEʃ-2M:ZQu[RxHw LA&HJTVZWU"iAI{9g0BdίY:鄍}'&ف勒0*F%3N x*%eLHYE"q\bH8Clu:msQ@ٕR%R\52vFi*aag+PXW,Whv]5Ie9iZtHx/hp8^Lgof  Rf̅NDAI2GKXEx,S@MFf:UC5BBPaS FJmaN8_XO"$*"YudfWG: x$LUkJ!v\W;M鑭FҨcAeT2b!2`ؒhUFw4"|$^ȸ8]<$_gg\#.*1.{\%QüoRY:PU'Z:Yu෽,=.;]Cc~ bGq n ^'=@ dpv~>r/+?tV$V&1j2up+dZf !o2^bDUf\USJ .f qPPRpeap!៝X.n Q*-jW.Ny?9;LyHu\vZ*"C^X0+itllAH ._,G}'띮eՌ͙ϣkzo.rm?l29IkHmk ub:yɻaDP`A tAsqަgB4Z{<.۰T:9o%bSH%b%gcf{W,M= V9iAJJBYpxX Sbh)ҡk! u+HC,",c2!TBELj]_l5"=':N'YpF/tm~2^Ѵ) K38U>HwN^sW|bGt3xS`(}jh0VkFڻ0$>|Ǧfnj8g߮ƭN0huhyY;5pn 0:Q=iޗs 7_ M{8!}U^D>=:zsVkL~OMO즏8780_ggFfT_ >?xa{JLR"uD<^=%h\skbD= )]ד錷f3l#cGI>BHl,@ c'W+n_A^[ Zv16(_rUhv\ʲggx6V^/~ ?D+Ch,9?X*s׳<*z yڃک̲~f5Xs瘣՟ߤTّ[.Gj^/+|odua59q^FMDHj)p$Μy9̙ 1EkGEck'/f aI1"Kqޣ#4Q3 FyAX+6&nw}\ jlLs)hKA=wZI@y,CmJ8\:r~%06WrP6B1H*L&CG2`$upRfU=ok./|4K5oڮ]>!00'¥8_9` R:[La5Rh8Fsh5Z"jM( lG@ `p'[j:Rc?o<XwanoWS\E[BR <,Tq Go93&h 䰦ΙU,V\yLB AHBGhMAHʠ0U#-R)לDb dh`o!$#" rJÉsN'*[B7zՄAV KMsZ]29z&. 3Y26n< l]q`p%?MijU22MK WP-f. @/ 'JȬ_@H!mM(4D7H&P艫qf|R!5^N`hRV+28Wt=o iMI@QJv [8Cg/U\μea0FSEn .= B"_DX`xs 9N;+@`<\p<Ϭ퇗秇nCY2M!fj{yU**6ҙn# *z>gkW%T8[W'K|ter|aݎP[W膿N$ K=RAKs2ΨˑɥrAuɂv]8Vk3kX(R99(ݔ7׺vZ>Cc[ ޵հVx ]b6 c;1f 4,7kM n'u./~U`90ów?ŸC["`맏aT?_L/cYCiPɯz4;i!wgUt&CLi'j&.ؾ޾~rÍ;90)Ք gdQ,ڝ|+<+9q& w_/~Ã? 濆F. |K꺾Ͻ^W췝=V;c8cۏbreX<uvmN , ޲Ж鵋m  O,Q[8ΆmI';(&}(YS:£ssN3%ФN3fc %`SM7'XPVpC4<va^N-LQ8/rsLxOIrFUWP])"{Jʽ -G;+M1'hUXQWIQWIZ`Hɨ+dD9dj5նhj5i)َZMJc{Wru%[unc){ZqRDJR2Ԫg#B=RW doUuVd{LY+g"xޗ&n˸ym;=~qX-UvC* ?D{}٘?К]#ˆJNMݳی̛]s@TF51EjUq\R1lx)JUWN^?}}53eiGXI\ŚHJDR֚x:$0ߟ$$ysTW{x ?*m/դ;36z>Jyic$-$ `A5RZ'Wh2q,!͊i6u\ M 2LQ9 h|d)Q a0ug\Li oVXKO@EZ>–^>…q.IC⯻{F@X&ՖL*N8ߧgXiN}9IҝIRWYBy hKUqs9lGTn5(oGB"58laзa_&J;l-e͛!ݵ nKQa߈a][or+?%I'YdH!k,XdQ,hgFA{=]4|s`#ߺa~N&:lmf;|92jAvW>2ʐ$MpP%ucYiF#S x;6뜕RlĘ >R}N- awpP>ЭT6 uWXoM$q# b{&8 ,/ARdp)@Er3FW 2vH2+ "-J>Begs%,Tб1[^*AHƦ&hz Veu݀ЭmV= ܁ >c:;"lؼkĚIU#N\0w / / / ؐ"uJlWT&BNJBY /@cPF;1 YǤBd+5o. 
bKR$@3KgPE6v4#g9u_ R g3m}1g^>bs-Άm>|͋`Dз:[6}x9Mip/uXԺ$mBw7:]:D`4зޔզ5$FZL8kYecRX]2] ]T:F*yH^}bVX پ&E^.p-dݝ>۩XSL=ֵA>sB~}}Tf;.6ي݂Jaf)M'[>A>"ME@eϕm+Y_nFS; m*%-*[JftRz21 nw%(zɪO޸\)RttTTh6djfJ3]،3Յ.4.|V]xZǛIZ|dz7O.O>߸ BLe2FZ R^:ҥB/6|fYLQjxU< ,BdPզ.YVg'Cv𡰝D:ؒ|V8[Pv3WkZ Z{`SfZX)lP֎3 ^DAC>,ƺo!e,r95Df?Y6YG񁈹,ؒbl[f0L2Ux0vl+W#qЈh&ՒI 2 :Q2XuΩ׸|^cӌhX*;-K%3&ɒȧHZjj[c،;Wgr]l%{E%0A/>% Q0a޷),r}1BQIM>`qsCч͸c_}m3TXÚ_67U !V7~ܷ;><x =@Hw@ !ڹe-DYC)ΤӦh* GgǼ~nOjgx8L_33pM@6 \*Fy&iS}-T bPmEU!KQ 1l}84,HJZ%C0rRԺA3rv Zְ:O*lr->E;rTVQZtbKo+`Yz82`W® +\=K`U#EKt:iq*.Dlht8}3}qjhIFzS&c%Zk EFc9OK1 1BN!Ukm:%}ޚoy-wFW|QYa Tf )0L2`8gph3gwޛe~ ,',Im Mp^:\qtmR%Ȃ@V*lwFAܗ>NiLC$*R Y{ ;bQ g8 ;:8i`Wa*O-H)P_p NA٠.%H9%FUHQuV )D,E6TϘ,!$7PY(tIP-d xE Nb1r{Yp,E沷U7xV}nhF祛mRYRaחI x;Ï= ƹ,-n5Gp6 B@FqG)/.ƚWM9,.qR|\2<,{Շގz@}rvN/߼:; tj\rJc7BSb=aLEU.c?቞~[|gyuyX18^jv?nI(<]ԢvcJ/ljUkM˓a]4JzfuD}^u X.ѩJF=]lnyn[FǚuGI!&y`+e՚|y~_UOx3طI,FʇǕ/cTW_YA.*?%/ %6ۏ&xUxfɅlkW+.d:^Uo[ |Ώ=ZP\KfZpJ\ѽ4Qg.Ů Ξ[UCp)c%M3_]J@3'M]4't{huNXw`:'#,}Ym'Jio>Tbf?P//|qWN0N/ZKU4r{S(%2F̆9 (Uztpٻ:!qMY'N'M{h ^^+ 7_tLd^Cv{t;O:Z)b)9eDh%E hAxl~ fizۗ\Bw vcnܴɾ1䎱ioxvܿQv^]n/ʼ6%^; %N":oLF/ʤ(k$ա5cϔDz-^H6fl$\)X)AeЁ>*Md~[hCn>l{YҖiѳp`bZ>~;^bks|xt1wtŃȝ2uK0]t:KF,s0?O*dBuZexRPSE[!Y9w2R۶-bXW k|l|BXӳK">aeäAÛfx)akUui|r :3c,mrNY3#*'c]gkr g:Xm[,,ږ2Dtd׀S, .o|%d靆ȲWŵbl>^cIo K4zيѱYLRf >v%y`f/[+ma&{&E..Z6pfd{d.6 4pxx|kN"e1h24D[ FĂnohX%c@q8riӉJĖP2\5a@r/V>+q6.iexkZRxiGBvNq2ǀ9O(MtVn=pJ?kҟ;72r 2p"!$GX zc7BtXE&$\ftQlax`!3:*#"a \H`JY,Iz.!;-o4S~wgۥnPb}5M[DlxnPlx8S9Z&8E(hb$=F.n p<=|ly^/1OXP d5K9p1RiNiWlynyϐ @ ``K  UQCTݞo}ykwuoO_>||=~(şT|<_F="Njw%=?Ŧc[2-o)YX#e~9Mbcþx;K-ۑ_W[XjYJEodVrNɪ V&7d0.CckQp:Oo > G?G {d'm߾deqIO)ظ&'A;)*i ZzU ߜ(^듙ۨ;T0,&I?N} n:_5?~x,!ō= .q?}Zo-+×=V!A&]2MN~:A]x/#$ԝMQe8_a~Ç? } M^q<G.=׮t0P0vY4a#A2sP@SJx@U`fC*oDYX.6Ts, )K %>Tp( B6<:^ZWjwC+CwƉ݃;~1÷~-z6--n.i=Rbd ҝqxJ>I(٭sIɖ)̋J2(HZeʥ(4Ϫ58sy p,: ?,xv m^deulgV64nѯ㢹R}R-lE]K750bT=N86@>WEu #4sѣ^&kY?z gdK \%q%}.*I徛+RcՋ1WjˡWHPٓ; L|bsu7q%yZsu7izsu')e̕C/V7Ju9 cj=⻷o `ͨWmON~]frPg֖ӃU}%Ŵׯ"9P >ߜpЌMbaLkdoyY &]ؾnZ2'Ԯt@[XT ʔ8S[FH(򥖜N)ޞ&ӽZ=w˼=Gq|EO7!6[>FsHՐ\+/ϧi`,UZ _Ñ̲gLI$a1L`p4YmME*bǂVNdЄ(:o;cƌL"^ˈiDk45Dt|6Й8[&Hm H||U!QSfW6T\'Qk Z| 7U[̷%evC*}Ѵn'\rhu>$]92 mH2@ ńև΢S.ava_Fo\{E.g5/\pM}cTjt~8l*PqC׶)qRk~97S eAH}:$}s˾SJ+|[7+iPqȜww7˥SJWT FMiZŒ7 T;.6}FGn}9MCD$wiX:\ê3oʠg?UuZ*hꬽ*Գأt1M kl LoRB'0|HSR͋W9s~~`:>,KS<KS-\x]YfjD SCW,"!Ť+0/yU72xs &`ϖe.8|3%7YTx{Yyf?@I$~^GKxK.BDœ\>QJm'H!̬I-$𬍣Jh#"(9X+VD h:qlglSVhna.xG{Cv+'qq"A&F;>L0;ƭ?u<}e,:uy꿊.Dc`\ΜҥVpb#VЋJzS}nMLryå O=cF@m0cĜE NVFǻIzS^@ y{5; U@[1} `o>r+|zOx"C%MĬ=&jlo[S r! q1Lj.~[Ӻ;Ĥļ c9akKZo-Ԯ#!WcO3;wiE@L J9/?q) Y+2Q@?~J#s^tsI  c A$~ItHh>(M s0El:dnh3 ٹ`#Tt?{l_6AL"ud{qèj< G~-՜FFOJlMvab=*D: I$m1+N$ i,hmVVz ܐdQg'o_dF#@Jb4U2NZbU劊QM8NV؆NwCt{Qj\<Ƀ+*_;:g) =32B)*Jg1瘴4J2x+2)XR\`ti"&e8׊+Fަslް7ɸ)PW,S,XTP/4ƛ-DC߀'*D &C l()F>L܎9NBMYs9b8QmAޤ㦨m{Fm[QZ)f2VO"iJҲ=Zku)U DV2rσ΁۞ {3VkD$X.U"iShp+HLZ(+rK`_ψt.Go:.˺MJnbŋ$ Ay}SJBIHhi1@ Ӂv{H*..n &7C/XA|TE/ Q_)8a\K<εew εeJm{2KkosL+Nޏ3216VdQ*6}"JDlӺq2ƣeqUۖvm-۶[vK㒃 KrIYRD1iPJCWGP"D%L tV%w!!db^*a2PI.1m:W۞ l>~ڦ m{2_G]7xޏϼ=x>|yH9Ai`Hg`S2DiIR0s.^x1-*/R3K< V 4HR t$pVwަsE-A{ e.ڱfSAQ]"<~}|>h-쿍Ӝ]TWWr9"E[DV?! nQ"MU浇Ho=D"p:3ڈ`*z~!~}VhZGO[*Ov7,˩31tU][$ţ'Ȅ,gQ.`)<̙sy]rHXtIe*lWJGlE2­uqFa^yɸz?_Y"B8UV/=Y yH_ɬ JXmMUoWrE<x%b0HhP>i UUe!dXVO! 
<"T *8 Vd"2,&&V2D1|B+G1H< (?>RH[׃ō3O-rs6)ify=K<DaAKM[O0NC.!MZV.rukwk{Onm"hڿtr]rXw3k-(:Š׃qqlK=t`)z [xXj {gVγR2tT\{F)K:ﮎjw7sD FY<_w+a c%_ xa{gxrFѽvc3Nsvu߅ѧvFw;8b;V9!Y& a.ϟF>у7 QzSpVܼ^- "Gн*:/ޅR<E֯0j!z~kbe,4pG"9w"~\x8xcgW n{HdK5)qLfF$DJ,Z)sCtһ^i5IR7OwЯ8NN_hɝֈkYsn9[sF'#@:#*% kA8LR"Yk9)0ڔb~՚wZvN{>o5Z–WB2D,}xsK]pZDD FQ \2RIϽP6}h[zd54''No~ Onzհ4_'f59(nmO?z8Ɏq̸Fq)1 Kשߦc!;{X\9I* Oԑ B8$IMfA*"g8J8ly~u.W|U9dmg}519T@tt6&xEl$ hp&,TEycY]M>"i0)+vaym7_Hf44nw)Q&Fzw`s25hXN7/ZY̷זIm ͥU@,l^g/svWy)\1ofے9ԓ^Z,v}[^YwHG51ɭY+[3Po ]Og'wqY|}CvxtV%A ?ytB5?Y-4?>a.:h'- `SG#|sp*2Ԫe̍Y*M򶻛m .; :߻8+ոߺ 8> *`$L,r@ŤHeűRHKP(z-g\^( Ks Җ |fP8])y^)GwKަskg==N&)vWeW>S .oY~-f?)|<0]A &W+2=+?Nl '1n բuo'#\i-Z$MۍAT7<KC8$ c@N,6d*U35LhBo,9pAI-BcZe܅(.j>¾=c9տ^<Д roBv +wUUcEɖVE$KZ59 } ;um V,x,Q#Z80n/36NB'gŻћ+Au~huE: ~;_g 7,ϿD]~fͭ<s.c4 ',$G帋c] :oxܡ6_0- e!gI> LBDIqYa,ucT`p 1Q.zJf L3V LS$pEgXdAy׽]C NِM{K}K||{&t短qifD{'K#>piѸIdPFK1x301XLgoS5>6BOEGDžB>ȹ ^ZX j\P^dhW9pn8%h\N!v(I"QILHp<꜌DfR| LTNqH5nS+!ѹN~OϞǴx? ^^n뛣a*!d_vt9>6tG87u2fÑ%'G.g⨁4ٙMwޤNNm7su_dn8Щg֍>s_9FҞ9gnCMshOawPd .v/k6wOupZprpe^R&F%q:vyENlV:r ~%O0\Eheq|;L˽bM3W姗/gstz;ٯgR|3(OzU ~vd[b W6_鑼#:sBqd@(bЗ(&Ppa\AٻF$W.)#lccvx N#R"D4IY")jv5-Ů|/2c\\SCp'P 1G*Ygt#C&[:3: {:A- l##-'8qz_7<]l;'50&8g陞;y&\;ͅ{N&~zbHWyvQqEA+^Vysl?9hPĶvKr-HK?Ly#i .N6qףO~u2룋FwݨF_x:r8:oKXR=u!71mzFf7iЦaՃbhVzeݶAޱ4{U)6m햱s.?AmGaKZ$-<^|W(ioqB؟Y)$}`9Bw'!H\3y|qP Mgx?iqNs3=)3%xwydslhAAW=:6sRV*-Nҫ 5&c "hi\f Ht|MY|Nj6e1[$Wk 5ջP, !x/LdsC[ r4I(c:9E.!g7y Wxrt( 6>`K^7yK+B.zߦZA~ӫ?Udۖ [O6ė_i+2L*{v_+`mvP]YhJb}vH덐Hzdh/nJ5旿ʫPWeId]# Z5 r:.VXɓb#!N r`63dJ(x2cA3/0{3h42Xn@+W6˞TTYm~ TtJS IzRjO 38]v6QaxVg}hݓTi](e{m uВu b:Fx8on&R(d<KFA,`#o`qF&Z˽BǥW2s]a_`܀pu~Gp6,z#H@hngmVpP@`&8zc1N!tyN B@q/5PO v#F Qڧ?H*slS B>R"Rr& | OOue_5qhV&w-B|5-|GMomC\=ܑ>4OjyJ;,Prz ?h?58"8B2 DWz/ȳ]@|*?=ݥA:! ]0EoK]Z`6h҈Ft{]/ymƽڭ^l\;tV3?|]*pIGۼbՂu$~S~MGQ1݋.#RK L\^LZqdmX1LJL Ҽh%Ld*/]Ĺlj 0V_}cALAHKZ@2C: ]6;?2= Rچ{Eߵ7<Kp:7}oEG GػLEOF.Oz `_Wo\NG#svȌ㤑Q^Fnhg >e7ҋu % ^k38x5qFmZg.cT[ pѻO3@'4n{p!yL,+ rtY2k)U,Bg`/70k*&n 6#6_m:BG m9i{z15xaLG\[/f9J嚋t/*!QZA&Q`vh*bun={(g1 WFktIJEpI&93(E1J8gȩSs7;7Ik9^Ȑ߳ f9ެ?lc[wPm*>r:v}6m2nF&fTCԸm aʘаt펔eN;@S2t BV"0=:h sdڥcNh}Q)Y{de2`O+'yyƟ~UŹ(0lYI0 Q/0,\^Yϙҙ [IfgN4[B#D&BJfcpZ잨Ƅ̈́`)NʜBjz+WtwQ*n2e5"i'dV[0瘔>L"8Y2jkrJ9V<ڔp2=K2 !e!)8aKa<[K?EE,R4r% \BLP&x "GUc T.~,0$ֈJ pYH!h%j`Isi7 c p]8-%ҫdb+묦%E*E.nx!/T¨`}a &Jhp*Xi+jf4:x x*vj=UH.Lۂ7~`d~j݊ϗ՜#]2ZK_&9.*.FJ'ɔjqhXD @œx >@HsB$!CTV;X+,K. <&DLGWE\ Y(A,>uBr;ͽ@ȬȺ8{BNx2լWsZϗu6'k}z;1)#0r&.h1Co,"D֑m,vukɢ0i04; aD\+i0'>֮hPMݓu n%Lr꾀\f M!'=~PKxYGMM^࿍uu?q7B+]rJpyb5z$mt#y S7JfS;mw|Geq[>:;H.R=Q08X8*nUfA h 2 >`bFrfޡrC.UQ(K`πGY:('ll](B( r@ 6keSM=5=!(+/|^$k%zƁ%(.7 -:C :I(ȍBqVMQVDth3bdѲд^%4Xn*:Ќ@OPew“UE8BnM}ԟ -`odp'K$De"65l2FCkSoӻa}&RD͌&GoAh' .%r)FbUZqeO1X&R݉tHPLfʇKʨ)D"қAHGT`Mb%:>Õ;,gųLjeYb@/^N -qi᷽)r3_ mRT/9NƳѼ:|bѥx?G 痚Ơ=?L+k Ϯk1aeJt|>Y,}Oݚq[J<~r^(V7_֎XF :aqlz۶n;f Yyo >^}o>Ѓ2s<+AK)iNoM7Mp-/~Лq>|JQ!/;_*=xahzɏxeDy^q1 hjمBE=)^tF\8i5cI#GG"o78`wB>>~aؽl{_Er_kH؉F3 [ HěWoynN>b? 
dy5󷼚Z 71Ü;΁mm]]=OPmp{i>1ߗoXLD^&G#{DhN>m4挎& "FRd 1kdLMD,hûch6μ=h#oUJuxNCP&J1h*/ֱjm->_X .)O9pPP$2A-rIET7vA_-JQ ,~C4f}^Q&вY FCt:򑃾GcG~7I'k%W=Ki:*M:!4q m6*#i /=gcʦ1Mhb'j =N4UuT;M\&z^dyRTR@6T؈eyλ6@_}'wOn%'ion-t.#@a Gm38G;WnVJ,|rW__-q} 3oavsn(MJ+W]Ι"2~YjRxp3/~mgˎLѳ}=?NEZ7Kߟ̻/MޖgD>11ܺamr-go~>ɛu|ɷD7T槳n"z쪓`իo)<]|x>F[X=_̨5ѧgl9oW)KrBW.t̺bJ&++'j|]Q#ƘT_`s;mP f-~O wi F%s6sM;hRUo?cMq Ƴʮ75>oe?6=:jΉ\Gxn]w?}x@ey^>(;_q oy!)KnۜZ(2mڵ/m^z<:~}ywqъ=dgP'쏂}_N_a_J~6Srӯ7WX^vgwNg{jle ?g>^Ȓ}>~ g:eT]T$x[nbg1(zzlqxsv~"mq_Ga^~P3!I=ֽQ*޾kƋXtͳՏzMKYtim~) Ne·TgϮYUsT5܇A!5<<wyDk1%<-PHW l]6b\4~)(}F"`ѣ}9v]1uEWԕANs lT6b\M" Ӻҗ$u%A( @jTV *j @QpwXԩЁxP( @yrdN _zB?Znw զyiBF"`yȸ&0Q,K]دQ, к UO\U?Z$qmb]u]RHWFWiuŔ]MPW>Qe+n].j=0z]1ɎEWѕ(a @yTb nHohr+v/HZ>A5W'C;k\ԖOҦL 6ߚw}`tԨMѼQLdU:OJ]ղy D{q^;A'*uYY(|@ֲn:].EWLkF+tEWSԕV4IW Re+EWLuŔh&+&#]10qEWLkabJ'&+ eF"`DWk>uuEDuxwHW &]16iL`ph)"`id6b\s>uŔ;#u%z~%U?WRBaשav@IiƵEWVԠF4ltŸN+n"Jj-NwT΄Y".,CF{W&)6Al,͸hr4;vK3S4XΦW1ϦXa^~Ps>jYA3³T'hݖl>N&Td\MȴZ=TdJ%T`^(銁ghQ~d˔J]MPW+sҕ]>b\rz9v]j2(`u>bܡWnbJ_)zF4|W\tŴFaʒ2I]y銁FW+!98v]1-Y_ԞUo>йpq๫>V~đMzJ]ZRϱ銁FW ]\u銀4q+E3v]1EWSԕVzgSѭ5d\b0[I$K4 V^L ESԴ{It#+=WШvatTYA3zfxJARYJ/&cܡ3I70)/a< ҀHW le6b\iuEVآ jHW\tŴƮ+tj`At&]1f0HJʱ)DWSԕSf+$d0-)w6(-2*kFWkӺ))bt{V=.Z z+~~UPhHW(ǖ=tEWVTf+"]1;j^V P!3[%EWD$]WLjʃ*(T%`!0x]L.+6c?xLOo fKzzGwd/TO(Dm}F!2Bd'DfZ"33%D`h?"y }@K/'zp]1EWԕF!#]1Jk!D6ӎ_WLmue4|u+*ǻd0P+zQ:Qt5E]QX 5롺"`eb|D0)dLQWs&u"]jg?ʒ2I]y e+>]TltsӢRѕ޳B '2COz[^F iY?J5'tաU/AaiB?`JEWL;TtՋr /XW/oU=z>vz|}Z)_3y6.1KnMtG~FZ_'.hϷ ۫e_6Bëvs5?uI[x:Bun Q_\o_ ^.R.ju~Ie{x^}m{ci_;EWyw㝝sݯU1tnn{ROڊ{֕f~]o޾|-;>UiGk#GHDrC~8ΆCB8 7wg[e~R~27TOS*#(zZt5%D<q!eStR5Ib2B%M(Q%Y)#k4 GY~OVk$׭ezs|E߆wZ5bPE_Ώbe@d-k*KhEVS+SiV(2jﴁ:]*T(]R"J2J}=TU:*h]%6VDCIu'}7[jPTH-7!-61M :/}VE 4S<&M)D-p>HXSex$P1TMW ̽tk^a߆\^QHu_[jX2BJa1B!K*tu'i)j!=ҧԙ#5Sx7ibEJBFk{@DԘ#݂)6Ґ UnH:H&:UPUa} kDMJ٦ h ZPpᜐzCDl.H# J4\V'NY8B-֊JFZWdI_m\UMRu@>wumu킚/VR~&k>eW{N$X D\ pqf9ܽO͝4Ϫe7-GpNTG*Y76E5ɍK6J |1)Nɇ5i7л9Fd{r5nj9o3|1 mBiLHnJʗ VCJ 6Y ЗI buRƗ ZRv|^IbHuEQCBNFG6Tق1K(#m'Xg dGh]udV@N`Nڨ/ E*UFG>rJ rSNcG3PQCm>+h-aK۾[qDg()& %JJo*|_A2hE ǖژ[Ckq#Y'eV\Zl莬Nc[ 3R#u˶ E@PeN$ic#6JQfjPѦ"jх6[ r  (lj);O-`׬jX;M*ĬeBGk;wNSYbԨTߕziZ26HQc9MF2 VԠ uszR uW4G e2aۙo]0S@ JAU.1a RLA9 N0t掊CC\jSS5qC%u:R!J@T:\CAN2MC{X +TtgqJAQ"Ł.0͑vEUP4iϒ (Ez@?PiW ()X|A*AŝETݓ. R^|Q,f^$L'HH/>XԠN>PL 6C Zb@$F=(aL! #Aw ԙ>/7nbA\ JHN+ zP( / }38`w_-yYsߜm-x bcgG]`100!ƻA vBW\J6#JhIWo#f BX9Y4<|TQa7XP|Ѥ`QBą򠷒!HDNkk*fʇ`)C#`8ӥ`)`^(^b u6B[!q2p /=U]TBNu~d}:MOU;:@vTVB]+!Hb!w5 cӍ~7`EUv!ȕ'X1Xk3tdD4Hcc.u%m6@^LZ@mDBMuj WH% ep7P@R@"pc2"<ѓf W nk 9%bkVcCX DA5DC7HX] @K,brjFF^ZF!/D(DxQ:Wnn63)I@̴LZU ה!P?Amj DQ;Pf`Qy8 衲BXkY!5RM9QSTZ(UtI{YdP55Ԁʬfz86RjvT*"ر,e"IIؔM@[T_l5vƐk9Nm/+w  _f|u8sej x@^,4zV6 `- Ϊ4QG]GOOU9&mF(γF9@[Fm1P.2z~{4\}IeF٤#väDy ֵؓJC6G=T3ʍڛ(`'r5wn+2TJ`]PLMYځ DGCPCz:T}f=. > XW Ժ) ur7p,;:IycaUè#;pYTQc$T:9Q\J#xL ;T,~ xm,mT11mAiom-v+5ށt&zPkFQ{&=Lb2Վ *WZ@׶wy~5801^>@ͼs jX*އY;Tڠ@n "I'XkʛrLpy/FF̆f@S AQ%#p45PzB%aPJ↑$5ێ4JC`Ic6\WAҘ"&b9R YK*MH 8ԅNqf@Bׄru3BXD;j0BI>z R DŽ^|˱zVQ05F}%^RRnYRsK+l-_P~S.KS[By K iWn{r_? ,˭[j}{ r9][/?V?-M:1{BpD ?Z]/s:ܿ[SK;Chݨmܴr؜a{G 3yWqi/:}F@9@<''e8@@8  $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@:!98|@Pj>N 5@@i@2J{o $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@:Z'o1ZRq1\mbVQ'1:PQ'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qx@9988 fZ d tN H'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qãH=Z_'//)zs?\s?ݵv=w'@HCɸDc\\gc\p% NK`\;"z?R7.c qKdA̞-M6dBd䃅rgΞ)OzV(o>Z#,9CT%yOn oM/e+wfũlmj"c`X]b_WWWk^ZЉS̍yk$Kg2rէ'y {V;/I7oMr!7f>N'?u /4,)'ƒg{(tpu ]1-]NW3 5b΅kst(M:B ΑM3+l&?ZNWhcTz~ˬ.9}_.޾P6%Ov7|?YF-wt{{+zcvWOڻ|w~Iq'!wf*Yh^뛿->@۷-V{]/>hɋ'UoOiJz i3}iE'ML*<=~ۼדn?6';կ^9J.Wg7=a?:jKh7lO9 :]|e{?ۃw&kjSʹvv=nŗ p_'7#1g! 
p):FkK5Ficԙ3ҧ· !NK.i7#ffC&x܏INӌ4zվt|Z1-Wo.S̗rbLWM·/-J.-_= F<#+k)aYUmed?Mi|tA~ e7:s]qUiY\Itz|Gݟ̌"f#Td!TdI Q*R4&۠gCW 7fhIӡg:r4#\SMa.tb. ]!]ꂟ]i|A\sR #]9 9fCW!ik̡LA*&g5͈+\Ѧ/:BJ)Y]10uplvPj兮~7t9T+^ӳ LWMtu9]OzE3#`ol Q&:Fré >0Ly|1bw:nsU3iFgC 7ع4MiڪWhxhꤓ;gc=K/*l|}n[~pj?֮Cl=AWQS^[L]p|v^pRs+Fk͡tNX]1D+ z6ђ>tb. ]#]9+ Μ9s&?d :FSܛv% cOBϽ)ZsתkfveeգB_ەJ?RߌkGn{UŮ7nzڵZCXn٤5'A7b%\nCOLl6K7ߤ7W: BاM' 7X qC1ש?}|ow,ؗOu[NnP(,Nd7dv j4bs|@`kzwG+׽\us-*z7-Xt:.u!r /Y-xSn%e'n*u~V/uXU,ž5H\+UON73.\?#We'/AGF/)Q\@Di'7@°x|d̃9!3{f+ժlxXҒ- unӫlXhί.0G}h vÈͭ}B:,ёç8zqyWu ~ ƺz6j`l|+Hd X:QƧs{e4<5 E4rosv:+,1qg(lm,|:U3yϦsAk*ͺBUʕCV5s)=3gbȕ Us8QGq{}=qn/ F%EL-F`ݲ@,mKo&# @ϊ #Bևې!zۄK Dg>1'&N]D $X\l݀&L } {KDy%,wLn5qpDO@Z`8Uq3j ~RO0U|cTgmsp%\_yWN:3l=U7UӪNC;XȝYb2]u)/P ? Kup=.~3ݣYU% ,Y4zxWyn}F;^܎g_0W5l*Sqsf }z>՜w^tw^ߧ.j=#oewȤ}3qsGXsʶ,xEL]zdV`+cA!"L1y,!cQy7(Oce{P ПHsNmEq]Y432R1`%%,HZDaRk2F"Npolg<T&GmiD2nM›nY~(pIc[@a{s;nˌwLZn΢˦]6'5^8HTEAsr0WNS;y6=6K16xYP*HM\6LWIw 'Bk4-(.h^ < >+B}CZ2C9uXQe0<8-e=I`i@#uFsR33צ?LgmC .Qp Gq r*%I\:0JB"gCjאDxYţ < 3FE½ ʟFGa!ZrSf3bSo%@PҐjWC;:pje_eeN TYM_jN܍:3^vYtT{9>uS?SZㆱxmE53D 4M]2*n<^ՖQi/;U| wJj7/Xuf0-s=L;* Lw?So.|ȅ*.p?YNfO hS]2 KAs"ё+t&H| AΖrBbtkHI%s1fBJDhh֛@(*m@CUF8$rL3 8lg(| K2R-ўRʣ6J=4-ƲfZEkSDx#$'tqԡM n`rwNe{[vۀQs8y"Vv<ͩepⳳj!t"XU0cx!w$O|Mld'; :mjlJ%&B@\EliYd4X.FF [.RRn$$J")}@= bki OP@kS)AL[E9>`=%™L͋<c|X匆e*֗{l]u@2D'RȣR>m:%*EK6k)}q[5+8xv7IIkO'(YڏR(P1-9@Q6RKRJ\nw'ͮ9!7@)UV$ujx@Eub! d0&=Z1"vf),M - 2KXG#ьKkMLhkmP ",ӜOaSkzrf{?xIaԟ6U?GCeTq8SHGDpT& )(rȽzFet1yOr[ ]1YϢhRNC;J@pB۞5qo [ӌc}jʳ/|T_,#f^ޕ n0+V"`u!MƄS0PBR5(eULD[>8!ÓadEFJ%Q3uBnGM@X$i;-q{l?y*^58k떽>{3݉S@P-p,?(4-)AFC S"QMԘv-IRi<6&!#2hr8h5HP,~R!EIFHʀveؚ8aEI?Bv'cҏc=i#G<{]W7AOE[*g>TNL\v^a4y& ۤzWxQ|>/y7W4K8Hhr/#orlWVGvARZ—0E(Jp,WWvV5q2^/$@P  zpK{,)xl0׃-xoW74nٯ^R t{͝770Nl2,k:9VEƽ2K6vFBdt~yiƟ^||kndڣ(prC(mľ0P*uI\q.{Su+ɖk(KPک/I(H amAFOR\IƑ< Aǜ1Ms82A"**n?n={-(n[Eܶu\ M SlNF7>{)Q a0ugRSk0cҪ#0b`Lxd!R&)#0+CʘtˎY3n{ba>i]BOIy󙽴nV7s(޼_zpry7.jVJppŔr<E%:[LA[mn`[)O4aia}bK(F[ :AV4gm}O'L/3޴4Ռ˗o2#wnJМ|Wk1f}{{nqgCX{te&^iJwSgJk-kb*P{"='S4QXD2488#u&3ߑFBJȘ }Ƃbl~LD/DR%HY9& Wy6)aKCvU JxbQ0O P%Q H`Ƅ*!*n.a.aڽD?~s~?|ٯ~ST\I_ˏ  c]6 w,g 7?MHB>{[U8FZl}zvACS2kY&CqUQ E Ū U1K'iY?}9oq.s}p2[g2ۏ'W6t%R)I?''vR 4{"5~8p|9'3Q=hw|W(+.YUq4Ic n:|%|ݻ7?}]-&>W)5AzOɃn=A (6Ȥ,%*E{N_m"` XrUh@uOb]oFPK^Y[@h=lhk]"Qģ83>Tk"z/5z% QC+Vi +X; yЂX٧hָ|cQOJj1'ׅ.Yn ~Nˉ)ލFq\"M0m>f_m(dߊi i6尿tIS6 /zwcjs"Qm.3wh6,sF:S[6'; sߟVw74Lo3]egƯxFSTjdR̷[43][wu,I6<fSr5*ngDݷM(;Rcɨ \SVXި 2x܀EbIyiǁ^Y|z-}.k!OTO\,ZNR0p -(Ǩ F#R` PJ9v@/iy-ѯt5Xqq6ո_ycghb( I]GJ4ZAQ>:Ҝ_&r^r GORJ=S|*פV{ߏ`HIJ\o_o%-#|No'&"a 뢔L dwʴ-{Eyg󏎫G??_ DBv ,hn[mpKg{H T 5-u_OʻOJUϾ .Of5).r2,*=d?UIZEsBy:^eٷd.x2r%Pi3I)qGN!9_%>$_%WI> q?iw=\wp ;/4IUP*IվHQ9•Sdey66"%\,>JN-.zMg?ߐnttiz8^8b]Cێqu w2oCRe/>{5lSD\U/T,H W^+XpDJ7^)0~F<QɷZ3RFV^8~y`4EqT׳>9n.J,_",oUq˱OU*pe-ujJq~п괘S7T}?OlE(PP$OTEbM [bReR{ږ3,T reN2:ru@C W2 &8FT$ t꓃rd+"a*za6,:p`"U!uP࢏.jne6a䐤Fj*ayc"'^q<-':jMe,<RJRWy/Iw0RPu!,In~D Bj>]wVT(B&Q`Y*axU\0DN(GSS[]+HXGar) A2 ƨU1k )ig-Yu ҭ=Z) 3 1hb4ǠQ.;W^?{׭Z G!h7m(fȡ-xRvWV{gzdWW[y3g,BNd=`_~MB@2*_4t`K$IIJO'Ě.p@"%*Pl0{hZ^AIxAĪE)DUY0\.XR8G z/Cc,Y#Ri]/F_=}*}MyEB܆j;AC2ɻI/L&Ȉ5-Hrz=[jsx- Z~/O -.HFhzCVHl^::ybC3 V,`Rt Awv:Q_t Mܾb7, +ӿOK7.a|wz_zC]JWvyѣ+ezq^>,+ʤ4PĤ39 xb)9HrV"i0%tӶV*-$IU}aUt5)$BHE!zB 5~w S=Iw~ޗk_`>lV`+k=tm$80aj^X}-{ktLSVv}Z[|S[˾I\A*ISJ%SZ+6z"ۢd$hƜ1JmTE} 4%!Ztkl֜Fi+T_[هFHL/*# ,Ƃ U&zMQP:ր4ne-$d%J[n*!5Vf͹_Qw2,iX}CLN^{|zyvz͔Tzk^t`Kx<{HD{K?-M!Eбh嘎ٱ5*h.ؘ-JIjI1_r.z]hP )XKDr(ŀH RZJ֖YsYVif IƮ5vOj -6?xK! 
x8r_\x|z1>Ϳr-3LCQDHu2fA3VFdJ^\tIz uUbcmIp7TY ȶM|r^ s;baD&CV͹b$_bIǮV7~aZȘ#ilR6R0^)%KSBE b2 dɳ$Suc6b`$4͚s=I}p5=T VE -b,`x/Q@$#P03FiѣĪkLV:(rSyì2ިRS0I)LZKj[cج9[ċClgRLJvZ4q]K LT(컔sV(dDj'MЁOG'Xvq_a3ʶP Lzg.gs:{b|_sYRߝ`FA> n~|GIV7~ܒSu&ywΘ]6㢃Ub錉3.h]$/!"`lB {" .jeeZBuȐK JY$9:e jS$d"keJ3:Sbh0QT)0 h! D("6F͚@)BΦ_Sԣ=CWoj=_\L1 H`ŕNGj=u:DTF2@HV&.Rb] TD֏4AEóaEYP#(=Jk('IH34kJF]˔{{\,Cn^t޳ɧj=jUMw֛'-w}!j ikVm]XU- $pEYy*AA%)rѬ}C-oFVUa:NZ& A RG؄l9fFy(߉Wk%-rpΓeA9*e#Sdm*`#u $O}Μ_M۷ӊG&a9)HkkhʛҀ)!b58IVr޸Y^4KqQN`JiB,CWڦ uzeߠaJBm*`!s /"UM"%(R'hm!8@vQKd\KR@9%fU*QgEjHa@U3&G(H%4\!-TT+Ϣ\A1=_,y` B(EQn׾$dV}nhF͎OmRE0W^Oѿ,']~.Fc` >v4}`l?H5RٿL#] 'g _cWM{5.zi>F>"i)m|7' xϏyQ^Ocz_|tzmBSb=۲!jfg_γn\]G}:uI^f/BG'G| <~>:դvJ/5;7fԽMyw[HgoVhd`WnBI!Y{fz`6iƱ[[ ٱaQRD 2$lsV;nn~EN)gUpROS܉^ۦAɅxkW3_4?Toq5g/slMs8ftnq7%(ý.b<U^I>RE#vؗD"w"EhN>-S8s6;hMdbf(kșm,J*h̘fܳӢzOhe<پlh&l*f#~>uTuᾛ\b0xo8(vT,(].tAZAPhRXJa.%),@2LGh JH,*hoSM0I 06 e8=ݯyvAhq*yzus7EgBT d:F'})%ѲXG6Q%ztw_;x_"icpm(z%Mv7/o}<R=ay.%;%"JV`0,,۫scOS[bdF6$}I>-Pt-QuT4d͹B uj3ۭuaر-օij~bfa3a^@?bČZ)1b8'ƙAtЧe^?7o,K޾fy^+j̿q vOOY?T؜OqQǓvUqҜbܲRbY?ghQ9Z7-k:[!Ao& ]^zmvu| ~sWuym]}šh噟D^juvϧ F2|dZ#XoE"ӾXGuStv q}1PH>udxM"`˞)rJE/ldMIeL5-ly23yh$8YEWq19m]" *eqmKj ɰ/UxzΦǟg- &4n<$>|q̓R'8N[5gR-&eÂ&cFYZ1<-(BVQ`ܶR Qe)#H m P Ă$3$VJ\h]RYs8+l _{58ʯS,.o#eaAzcگ.RkGWY*#>slVYpb43PT kRc |ۓ'.ϫ.rؾ{fp4y*nAw۷Nr0zOn{oSz71.x~nʝ8Rkg.qcBs 8S̩H(rN3TJ*u2I,[cʜ,͖䕋KSd .s( Qm)LoIjҹ|zM|lMY#v% lUD`fX{{U0`.I RpYȂ"Aj*‚N䝒 Kgc}EYf6$k k106#rnn+|f&CpdpAn-i%%a+vdIVKmeIQS"Y,X[ENVc P.,g-Qw_~>s!i)mu zj{KCXW̍x*'TmX^#hEEE<)U}Tj8J9M9}ïv;a$mOj%0js'xԀ#Y#UG Nj PjpUg2=~ 8mwYOG/ùD^VaUf#mRLH V " D8DH~ ^0<`^Zցܑ@0QGHK. LQOwQ\DYzyg} iI6OV] [%;ntC-ӲySmGy@ m1A(=eSBᨉF,F"`czG& ߃VKg )!cs&'0cYDaN$5\rehpKK}RqXt"TqIv2MR=ҳ*ze7-{C{I.6/ na;Ii!n"A왋qIF8K"%қE^I[)o5a$#|FJIJāP Jދ JH{E8^j."JJBtXqRLP^ 8}5HE,Fq2xYa>㓟Gx8]U'0&Wa2=USn~IHUUoSUC  d=O D`v 4 WS7l0NEXS9ӻ:$6$i!]`@aXJ\E8iZʣqFe+82N={]zS󬅣ˁ=ZaZur]}"NѦ>o߶m=2٩6j0' .vux0;@aC>`YN4Ho3Y:q=Ĺ[cR;ĬW逰 ߜ#;^sv>cs6صzN&L^X`K_x*WxQq} ڥN*tR߆I |wo[zm~~-+-V7.Ϻyy9 m_K`^p]Iz#!-CZ\YRhWr~>9Ж4<0c)U2xi2%0bң{-~4R^D=)Z) tL( @C N1bHT!),1mAƳs"Ow-7ոQԱOT ܗ kL$`6ˆ\J0sMƗ9t:%gH)EԒ)E@ӄtÂ^Rh`3 665>k:ݔ'fo9J{ q4@Mf7 e4t:ȍF*k@U?RoÊs|WF)}vim)h%FV Yi߆~ ?ÿ.j^n:$I2غ(+x5Uü* BkK4*5AR鄡"ݛם*oחcEP ~\G/.Rv(SUy&zG^4$'n'8K_ fiMʜTw70%>{5HKAIiVWP4Klۙkט<۰l'3lƯ-z|-ɽs]߽ ] 1%{נ+wɄ;. G$&Su CwWWIb<]Y^́ (Q\:< b_/xQa߈a[aY B98TĎY/>e #QHY?x0pX1c2b=6M BZ"2_|f#ced|R7Mh|)hewx?ZKDH&RyGIOn]7pGYgqJwK:VIKYv bR`ǾCSn1[wl 9n[=4ΑTbR07;pVQWZw|;12wQ ]}9jCH.kn J֝ $PGqrTW/YIO/-= *Uiۃ29?mjq| P\^蝞'AB=!DOň=D-dy.^? :-=C!{{^?O{^?}K ‚H_x1_x1_x1_VݜDRG6F+g-iGx2Gpi{󘛎EMt^k'Łnx>Y=;ʌOy LqˀY,CjKN@!#VU.wJU:p: e}gX*nq]|imR5:ZTɁv_}v\o@GYpvI\c\M.XN+[鍿bQ:ߥ_fUY-~0b;%MtMfMG7_4n@{ yڀ;-ǚ}1ZB_#y۸# v_}];}?kޗl9"dK&d(䭈aN)֘`!bxT8yf7 o3:ޖ߿ * n qਥ_Ձ:ȁS{8-3dK$pozX=]t(űt0(=Sٳn>{Zec1ӅRyNM47-aȭ_)5UHfyk! |/$Д 87NB*(厬4Nڦbʙ.ovrW[(~&sqGakf>UL qj0ʶw,=o~ߓM+0}LQ_Wa0'R(M s0El"'w}DVfcAF‹+;tr52nύJ 4X0B(=Y%ۂ za v Ҽ %b-,vGEwo> &8p`R"r/"gF ` 4L4]T=ד*W{ Ց HJLRc·闩k|6O~ݠ4TKlAdA#m=#h@ q ,&+2({[gyO !){Dِ& fh/#vH8)0- 9r;"gĶ_R1Ejg},e/{Zhׁ2fDP4LkmI_?;s30`-l6$wA?]d#xݯ;-˴-'4fw]U]*D=*Zz2jt4DLrE y4UšӋECڮ>{?yb9g E)~|GKhjR;V’x4wC(Q+%D(?>qƅ8T ay)^4 ` uA~ $.RP >@HQ3I bgag!rf (QB;ʨjY3<8ZgL $N eV1'8ÁHT ƹDl˖uk9.ׇOs7O|&_g xoyl,򽎋_ԝ5[>Ĝr<(~xrDr^m7E>*xA*+yQ}z$k67qeN2*J/\:Tp^Djt"䰝N{Npij(c$r!76{%LZan` mz@yW%)u۷o6qC |k=0-k=K\t]ⶒZ/$90ϾUaIqnPLE1=?ή!?١焜>/~ 8d1=#j]((ZGL+Qh2eݛkc# u.+y]"7^3Vz T.&E#W^ W-UjO濭\<L"˷1,oßW_Gnj%띪TFF}V˴~QWx_,K43r ,8{pťuHlld~YujMU9N-u5G֚c_5dT1@ze!KY DbD]CR4"&:ka)+\!xF|B&tF AEDt&*.XC h}n<šjj'9Քwel>s_aH-c6^:Hm]P\>)<.pAv' $#[ق1[1ࠓ^W9hA0%4\*Z%t>nh'tqNHa. 
d= onwu'r2EykzoI ԗ(S pRQKg`eܣt5[ig{f!&XS=x@rǘq4,c:q\(qui/ZI{:q<::t2X(eN珝b>)1kE;)#LJyKB.2$1^1pzlmYo_"M@2Ԭ6y4_28Z}}'7\J>Iy䐖?iq*\;)RDJ,5 [#&u(m(hc/ZxIxP.eTJ9+)z+|VXb1AC(\H^'mmT%20 yJڶ[#v5tZܻ6(Z١মnUgi:Kb2mޖ0ٺ65?7>:LLh_nbZlB6Ga'd0?5d(V,Si5"2D!Շ:3?;O/IJ fp 48Es* N%# yes0*2/Pˍgb3ſfD|˖GalBkXۮ씡}6DDԑkg;@FdǺ>zʑ[}-pYN,FXc׃2yU`)U: xVgM( q.AO1vȹ]Y_GijZ{աǛʭmuf\!'BRh~2"k^f6S=mTYc)B֊T 4,R){k:$@U֚O VYߟ?]h3ZZgp\] 95y=fJ=>h*]OO?]`1oR^\)6yv&Zmq{.)T@)y22bV: ;`Q bTd >ޮI9;8=<ڥ ٹMmahb-{E` ɰeUφV+V+yD?V80qp[avF %VyVߠ?'NPc7! J& %BNL&#T"w[W-J_\>} tMۮg]cm%:¥^8712p 9G}tʰqnPL3; !S bx<jA8L20DdRdTځ0[c&kPt,Eg4YfH)CG!8e#%S<(x!vG4)gYSPoJQ *NjIJ ^i:swͳO D`FEIh¸dQTlMTH%$%V]N+a޽ˏ?Ux_rX/R]_c>wPWÕ|O,O-4 o{HcWU7^mYa:hhw-'70٤aH3&M4Li(lQh(嫟޽N$^']|dcqwJi8`GD(ϦY0h7żaIAo.okx\aVqt4Q//^O-nR|3_jWR@Uf|b4!#.E&(hm޵+!8j2YWPiu| 6KFdu'dֶ{o'"[mm(,FwMlG WyK %tt@'ۘ- qrIKTj@)\L^"%6&yJ >$Xch߀ wE2CTp/W(ߐɷE΋YQ?L9'**Ьg8VV8UemJOHxWK`{d W7+t.Η\<g0=:_ѹ 9tVW|D+xnu#r c~w9kZ*q +7hB,;ߩwgj޵ܩuP훪|8bq4S]0De Fi6? i2zz8[dNVRHڧ*vv#٩o^;GD?5W76 vh=( hG0mNpVnx &דgj{8_!i籺 \ n7&/Z, )E-߯z"R_D6=Ab3LMOUUO4yf4ˣ00r>YY8tn,~y/#ۨvNQTD`9h-6ӥ_JdžoߒZYC@nO)Ӄ)rz>kX&ZQů)֫j{S "C k"!Y2E@#9}l)ʘNNk`Ë ֹ]<-?`1R*} >ppC fv0X5 rW?}7? ?Gw5W Uϫ6ėbWvB{oi]z:+yZi=h/.'7i߿_O~ѷ\dId]# Z59`(ν_LuPى+:Z f3Crg!341@Lq\]%=w@Ym"*yR՞$'Y&-rjRr6Q~+o7~n-޼{Jn6 wf^po9Ws'W__I@:l 8KrZhN#gq?ӤT)Ͼo'4%oi4?H`~Dz;ƃĵ\o1q\iWI E+f$+wȖ8 [?<{3sv(_0oA=gSWNAOy1,oNtafh\ۅW /j~tp.طd on0aj:w=m;; m܏n Qǣ_ ǃg1!^[U`M% / YOH0;}av;%ޑEG7oRAѤ~ Sߑʽ*Fy-fL>.Zu*0]u0MRakiNjq}0F-H E%o- Եnxn0gz ;Ϥ5V)s5飾xNɃix>I=gz =a[3c1;:`-|4 U\}2J<4[d *&sC<g3 kF)-2f|}^kz/m u|Xv[8F gl+t\zJf rʝXuT\E2uWHkڻ_`Nܝ \i:\)+++J~2pU'_RupER:꫄+-VY!-7m ,ypǓt$~xPiJq|]O|~ޥmSVs^Xsp:i:'5Ôpdߗ]XϠ:.cELrsZhSa)|Ws͸BWJZTC6ߪKS}B_W/tz!y(>q/5K8*S,B]e6CjA[he6Xp)H!EhRRr& | T&p]z8;bЙ||=\W 0#){^NH&󷶆[jB8ul9 g[Y $fKf/=i9v|vH>>rhSJ"]C,okpofCAwc\;OH)e7 +?\w~37x4pT3AGü]Wsv-@hxd~१nn*Kr֯ݒwa_jfPj#duJ hix|F9 4$/aFcl}\11#Zƺ%Q#'"ڽdYRC*Beө59{.1gCdz.a$!sJYf@(R}TjtBR1,E6P'B2rr,Av5qv hڡ89:֬$ɅF~ }rhqZdR ͨ ;)Y8 d jɔP6 GAx@TZxoQJu3lNXƴ6dM&k&lkBUҾl%c"1IVI*J&I9cBV>1B~_tn>ɔEB$pVځɀ&FGsӌ3l<>̆B]o9_MA/19pCxvQroX^ka0yJE`'SPĵTjHZ`EJ}mWX`7hM!zAFR7vb8omCAjZ-Kn O<1_ukcps38"8HQ J{@Dr擬#+K#@ K$@9GMy1kWS$2N >w&8}kk-wpX;9+sҀT2k-}N.CetϚS=9vY둞 9IN!֚w^ɥRTSBDT_TGҧuH<)Y,8Q2Z[ɧ;@\ ɦs%HV:Z 9~V>O5 ֯nV~sB!o>1fxW<ٰ-{y;Ja;6OS_ES4UN#F3xPKtVDuKa ѝ"dj!e+Dr4JsdڥcNh}Q)Y{dǺ9=z̰;StM¢`"h'IdVa@ d *ze=gKg!q9@dQe+賂)"@FbP$*9A` v^D-q*^prKq )dGtYwڨI>cJPy{]HT P\ŴH@𤜌j\GI'"&C`e[[NЃV7)ds`R\K8ۑR iƾX+cX8ws[mx;܌^bcKFk)csxREXHD#R4 hiAY?R'B%B%BN$Bv+ ['#9Eɧ\ja1 n- bR>84eNYsɃHP3nL. i%GoMLGfUc"8\Gw)O;s9"2NVT㻻 RL{F Z8F/Ed֑ X>vuɣ,J& f q朴y Ip<' `() N+q5q뺺4əWs%2_Y=iK5Æ6IW;l XWc)uNqKNjjZY`VWRO1kIڴ-ũy%sSDzN,_EOh*[DyBZKz >`bFrfޡrC.XŞb 4;|/;%.׼+[t^yIpAz̠BloTdb> ހ1JdXRFO!j T 2XF`<2,DfR5|R Wj׳cie[emS* e4)8IYs7 ^?{Wƍʔ%#>Xz8MW}ʅS"%EN廿4Ge+ _71/ǣdYM:zY -^Mf* >^oP(\f/VZ(6VMMМap귙4\iZfg09D!`,Wm>6uMjMkc[ɸ{Nb>X攙+kܘ`s`C_u$G(68̲e-'0 bmoƝBI}Ykga$9F)8ZLcaGOLU%y<ʲ\<w(U3x(]#0MрdemMR$jMI2WWxݮ3. 
l s8 Oma;'WJޢ~Hޑ_'"ZD#Es Gs{A 1p&v*s(joFI*N/Tj4vCP q`8-RLp72k"d<'xyv!M (KX/>Ac>XOd"Xz$=4c )VZ)JzBJ 1hb4ǠSbI:k LS!l B4U) p*lRXm;Qo"Xy vru5Nc- SqZs^ ut %¬6ƸQuk ipUk˽qo ZX:scxaL C!%aGًAL}pJ6H(>|QRo?L}ݨ~-Pe3XEsFJf^rC\#ga=Wֆ5xPOiGhn VS /۹#[ m [_'yYUy~ J4o`>QYOGE%|@څrܒ76a;Ȁ+؍N ӬF0L/Yd*rT†l8'-XxM h 5(Vld sd%P^+4t;5HB{,D$9PvAu4`t^) 0r&Y )PʍڀR92E:#aREbvȹY۔BOH#{Olҵ 怊۫KoEѶ*4 T׶oHe֡rB es~Y P#!ышr%8%qjŬtPKLSs?kd/ZG 9 XPadyW-@ƔL0wءVP H|MB?%'TV4x8bCKغD[G1 brDZ 1ό;-X4m<&GGrۀ2ب0 c%⹑ɢյ~v𳃟O~ZF($#r;#p *Q1 [nQ*9a]6g =.j">vټN|?ɶZzUf[;tfSz"\Jk JD3oզD 娑DַịUh4BkM̀>˝QFG0`G@ .NV>:vȹyoY:dzxlW=^iNQfj+:4˔*YIz糨g|&gLDA, ǐ+m.gJxLV&yO <Z]]h3&`v^m@ܣ f{}ez㲟Ve<4mʗb^r/}q%7SVÖ6ke-e)Щ"k"͓XP0A*R*::*d >.I9;?qUj\=jNy$FջmMJob7ۂB"mP9  Shℎ =:z0't6Ih:1(*^HSb#uBr3.L.iyyӜqYyAN?; ^:eT`F(fxj Ko<1se[loLo-o> sݛR'nHZR~[%k(vMقo;M19:n 3,kFLPj:Rjsfȵ1OBDcSn{;*HƷ(6Ns8eC!fH ^ wE%| 쭭Mׂc*_AmN/'![mMyKoC@CIe3(6KRJxqe_l#I ·MS_ͿF!ykLv&joai>7 ZѮAO7i=I-n&o)RA1^UzN0QE3X쌕W} ݧxIܧ)3#-R)לDb dhF Xt4D{S(ST̝v8]/ʽ3ø)ar/hl$9;Wr)_Lr.g#9oQ;MBw9Vvr|zt?O:Y^>^%g#f<\JKDJ'#4Q)P# Qҝ)^x[бqQOrgq(CkӱP=9c~奒g5Brqtgd;D,F Ԕ (͖Rh \𞯏^JS09V/4^m[>8ҟćs3r 2p"`qic%p~bꍵERhP pI{7u '!u`FG4w$P$L(Ғ iS,cWN "y<~DE7 n6]Ku8>NmPjjm^70>V(/p-2e's=Lp\JmpD## Τu3N|>EO'y>;]H c4!>I?j& s"c,ӜXEUN^4^7O4a`@\'V U\^ЀfLKh8mFowݫCG_Udͤ^bitljYK%/k3ns?%IsPIZ}ӕAzFFZ,QjOjvaCQx3/Y\GCICHC E : ձrPxVկۿëwꐣ?|{4li^|7ɟ}P[(ˏgZ0hWټ`adf.7goYqNs45xzUS7?%dt9|w o߈ ZeG/zxBhL J3+D)ө T 1Qn (=S~~ӰfwU^FvQ6/ B4 % !p]*J8*>s1.gIDz0+A~+&qg$4:B- *y'FdG+bX#{ ֋(frBtXqRLP^ ؓ]5Hz'9 qXb.۱ϾBYϲSU 7L7,L^4%~NsXT(厲&"e{A^>5Z&q}v4ْ+{.㕽79=w{ 8+uErb[kEN+/F4Pk|m뢙*=l|aMMpSek;ծ]R;>wtjK;Zb2"XoHh[CeMT#;AM%cpbe]NƊĞ=M5VoZh[.Fnutzi (԰Vxd'[3{57ޠ9d:9:Hq1.;Za,(KRVn<r(MeKh8(Q嵉c}|8gs'_~ 3c^5mfZ`c9)SK6HCưBDž#DM27fFL'otTrfb`yOW~$܇.س >|}Я8/ OrsaÚ]* `Ww5kl6 FǥQ^ Gк/'6,L 0HyKM(/Gq4;mG?;"5_ 0=5=4f ؇.6JX)F}'0LFN?Ce{g>9ˋt%ϩ''T;Syʄ8|?w'?OB>fw:a3A{& 5x{E.=-;\1zl*61r!%Dd Cl;Fgⵙ&m~,le1-xi#ڦbB`.@匢sNe6:ZJ =k<X~h (dö_\$/ j[E/v&J$QA#Q BKFeQzC,F06"d'7wԻ 4h:b|9G)=EaiAš kS2H؆ یZM_$l3*́ @h +|É>uɥXUCWWJZuDqD b6:pu2]]!V]Eu%-Og8[!:Ղ1t_s&?dt&>Ȃcrv*B3&9> 9ߎ卒;#׋42>m/"Tr; ڪOٲO@͸x*RN(JB+xIgAp'LU(jNdo1! kE*MP|FEʽ5vcHgܰ[܂77m9auj\pDm&؊1o\ٝLdnf*5k7h ѐ<"u <<ԾVCͨl՛TWR4ώH]eȺBr9Ǣ22 Eu'L_@RW\v4f33I2zJj4cgf\a. _˽?dqt(,ǯt;{ q2|n({gd;e$ 3S]j!2יI~nWC*COCi_=nlW> 2jsaFW?)_:dpR' ?`?Op˟ۦ$%v`=kfP˺ 8<[EQ펻j%Q4\9kv^j*v%d }1Sr \pG%`$(_"-$ &6nT)aWrSdQ&M@"3>T"14X!HPGbk7ĥ31 :dT9 H+uZa9a < I#jN0? ^&i" Bn->Ioe2 T;."&4rGnjB:Q!XӬ" TS sa_1٦?H+Ås>CYپ$<ָ/q{qT(&2EƋri'xIuhƒqWB #Askpaٳ#hkƘ y>n̈́ %nss;s3[LKmӚqouTwq-He2焧zB0HGLA5F hʉZ>罥BehuXթ+]/~'eʺʉ^x?qZ;ֆ_)j?E(_Y ~RRф'tYm ֱО"GRJJ[&JI3jD/W\P0yǍw"&0qu`nKI,%jӶe }VE],FtlTp'uFpVm;-ae0ʓNS;y62[1^|"toGhjԺ U ,fm. N$c hJ5J8\"%@ uA%CKI.5 JA UO:;L [\3C@q~^s򹛜xִ5qyk}_A{Pm^t~¤̎ask`.n:Lm/-sƝ] ڻT2J[>(+9N3 nh+!ٿP 4H4y'^ng_E9~ٛa<}]ddp".$GFyDqJ {9F%Et@N/5wOGcnNko;;Rβw@BS%myre u\q"ךf=q=.SE-OóWn:e\<2qTugo$jbU5ȒUzk/+v ]}>]: es͌ ͏QݜkWWdْAz3 W# ?vGf^]6]_֬%Ԓ[ɽؚ攌9H/ sIB5]khBF)@믵^-X;E)=>$׈>8T$\nY 80A3%\*R%3Qq-ZOL@s#o^OxeJƠΩm$AOC\<[yS) 8ΊYF0" t:ΛM9urS 5hdZ+ɉGPPPIHb :ӨH J\3uQ9m^X"%)C$ 8$F:6l5Fjo^ p0;NMP }il2LRPQ\Qhj>Lyjr@bt^Q5Kɹ3!DD71P*>{BUIT6>P\1)Ԇ_'h0*.|VۓW N f /#γ$.8Aϻ @6}ilbi TKe#U*1UAqf E Ȉ!Tx˥ТJ.%eYC+%I30M$B/0345 hԼ:$$ ʹx1BQ|}p `b'JoJE_`Lͬ>**H/D B2~SM<M t*R!I`^i {!c/&vt \VyY 8LDH<aI.#hb#:Bp$Y'=:gF=oӜ}C箧m{5^jg[tuKصFŕu0R:O "4^ݞpaqvh|N6V444&M(P1-9,l !%4)$lw> g47".Mu*+(F!tN,$AY#IqH&nhrm2U(p|-i~@BH*^F51nMZPrXl,ZҎPG @Op).}Ԗx`W5slư1X EX[,(HeHU=úW3->UzϽn1zር90L4y>d Hcme`1Q ˠ#ӊƽ nxrɴB0Ͽ&θP :i3y Un#dTn#:GHEr+!JhJ8Vbt:1!8OR88bZ 6F-kˆuF$㑅H>&nZH0<)c" [֍sOGȡ5.&G~nJykȰ|!~/Vy.>=eHZƵ H 0C#b:J4(DZH7ĒE!"=A)Ri1NXZY`pظ#4&ݓxju1,&s36e.tٳt|=xSv_K \*BjS`=M~a(R:_`x0>?;NaxGß&-ᛕvGaRk(EQD YKf YFω3_J2ө(nB.$h)nuyn T<(> b 5]?W';. 
SL^Ԥ4h<v٫:ܸJ\%`L"1XƝS4 ׽ ~'0?eQas)c%⹑`.bۚٚ[j~ZF($#r;#\`DTLh'Ŗo*A8a/Wuy`|&?ՋacNjxw|nƣӪν<暷nm/#R,ÜgTμSV0XFMbZXl$1VYE)j6 Y2:ڀfpP` ;`0CMGhLgv `%=ղUoy弢eO,5Me!6q(#h`<:e1D ęS^:_=6la*e &fsms˕1(]$ m[k]ZgDDIgaل{LNa^!(0Ob^a4UTscRsF 7Ƿ#3W-נSSʠm)Q,V;_C {gvg9hIT&bX(ڜ''W#ɖͼ<4m7\ C!fH ^ wE%KʶXD^@fM8{pl,sŶ"S@{-Fëݙ?Yvi SkΙU,V\ ;ܐU7b#ئV ”kN"e1h24D B2HA0+"#:=V)Q1w~%{gqoR€ + ^,zxĹX@O_L즙癏<[YZ~i2}n}aހv1D*c \#/wF+2.*I,9#ֵ\iNtVVuF+}Bަ 7e,?1hH)f3԰dZe%܉2X8gT3;)3B-=C<'׵cIՆׅ.g Lx \B/`0ܪH3I6Vn)XP$ŢP " -"l./:8#:訜:DZ)i&1+rȲo7Yz"Vl͒8iP/ S78I[Pcu7u%Ai^dQ^& HI2)rF!pD##0 1r-4<ԷRiq!%dL>D3#xк/o5Lˤ2$`fמ7:ű/gu"z~=9ͥjIݓ>^u_Z6T, 6JynὯDWQwud j6%^UI7:0'[UΤ9 * m,pgLpTKV kEpz߿ZG3{<n|xu;X] snJY^F?^\L[hAq$=/McK^m:<#X=Йr>}&=)`SsHFި 2 aLZ4^lˢ|y. }2k͟Nպq(QX@>b.\KN /&v`Jq)`ނ^34œo)umtcn(uEl} ܠ]X4,~RB5pj>TkN$aw >pwG``U ˚}۟M~upp>^=H`E>4<;N;axGß"-GTnVɾ$l&p(%d.ɿ2ͳrɞgKy>SE'٭Å?\HPS"!6Ca ׭O(Nv:',P%6hw\쨺T>( S'5).ax<v٫ìکK_ʣN۬ݺ 1޵6[ٿ" l;qk E,|xm;~L7ÒV?vj$4ж<<\>/5~bS7#~r5 0ic8w#WP?sJ+}6=9 *LF׊LEY\eX G:h2;HBג!]I4׾ϙ3e.zjVP3nNzx5pB_ؽ miLFwBwTLL;tY >%]^f&uJ_STbr1ex ~{uQ u/u6.Y۲u2?`zmllRZչs-km6\~P.?kݲQkOgo.s\k|KJؿ?'UH{y~nqV2O.ɼiS:^nMV9W,;]U❝g8h $S쀝L*vb;@nrhWxBre=i3H5l"W@ rl%(W?8NOF:\?8%.Wq =0]vOF\m_rurA=wNg5JOsvrurQjJCZWT hs \:ʵ#G:Ԅv2rd&-UGiC2OlzsE8v+Wк CL2GznӓhKaBr<WT hѾUG(W(W6~N*0S=^+ώƝT#-)ɴG>LF8=I2 ,((}چ]m8?]D<O'w;mtb[/M͍1in\bitJԭ=jUڇ#KZz'1L_T/~wf{ⲳUf䛓~d~8麟"e"Ӡ3Ƣzxi`?<ز%)mLjw]J.UjΔ 5SvoS̳ߔ73ۯNg.cwᄨng{m-{ɧ' is_;$??! ^tRLƫuv;JVGjMoO9z8І=`c}[jGȕ=s(I;m'#Wd"W@ه_.WQPJɄv~2rВR.WZ=knT8SGAE:UظS0CʇrLxq];kߏV^v Б:l}SnJd,2jb;`"a>ZV{$Wl΁mUf*rцrV\\S)(*ƵʽgoLO\9 q]~?~!d)S䦲~Th-oXƈk?ɄYk堎< ٘/aJc"xyEyQp?džơ3ClG -qhMOEž\MH貣7/WĢruro&Y?~lq׳_\ r}t"ۿw关O/I;pۋ ͊Pw3kнAWΦkdhͿ|ċbo W J Qx1YVz6l~5muѶ)|oW͆'9-&ڏ83c A^}:vv}nV/Y;=v%ܳEifgP񡥆/zKHT]}bAVo>{TG~!yn~FlIX>n>?;_g|1>ߏPu{Z}|gAR*_j1jgIj61څV)Qщ8eRd2!g|߽[Bx-ųoޜc-O_[Opl\ВnU.>D+KIpךY%y)&KU$yBRƨXb[_XcJEbt3m^?CQTA!WA6ùkSP6BRYE}:6'r2G ~kCuP!Z ZɤBV(FȴB(&KhQ;xKXM\j͹\6u- Lr[qlJ5IPˀ'p1:3i6c6YP7&eZIptlY -⹿0@fۗZ6$BegȆk>elP:jJ0 =)E~Rhh*gbi-h=ԸZ:=BP$^,@xdĉL~o~w1w1Π-3y$#Zk| - tڋ&X)CR<@B*zK H C2U.U;9PD,1lr6c:Z5U(E2) 3,RF ՚%EM#hDGJH/ѥ$j2]8alePm>!(<݊ GGz=+x-aoǦnvᢌ* 6UljKJ,@dP*Or0%[M6EG,Vՠ;.TEohnJVeSC`dހ6*YF4,3CΗ]b-pP6ZP*uΛrr8 [\ne*TPXYe3Y-sFPF5`Ғl, z!(R3 NS㷜2Yb ,_ ȱ\YuD0L fT)pB]bpbaސwSApb\጑ !, @$MFBCHLh9}w|W.=;wA oӂ@Гa+C¡Ku@y.}Yd Flt9ri!Z.=9}LAOph7uɗ*X `HjIVz 2LĴbyuF/C IZAd3XFՁ>S%H>zT_5P4&ݚ+76Dq/} ,:U' ;UsUU;:Vd%gf (pj1~v?7W{~'sU2s`I'> _{`#F`F$e<IrC*0.//)44ݒi!E)H'ZVK0Y|h-v29*սc- tux e_& nEK&6 %ߒuyBh΀H,Aq؍ F:[ -@i,V ;N}O9\->[a!$j** tJY)$cC:ܚOd2]?VkY#kFYxSsHm *t/ oFbVX¢0@\5Ⱦ[L0ԣ-Đ+ENza~ _Ƌ0M{{~uYW=3/-/ <洉LP3U)K12.&9t9_.)g Ny)o%pr3Y)i>m%Mn؋͏%By/n4:uuc);.C 5 b=^UI T %P{RCk:*(F%)*B@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P tJ 0!(x9#7J @˺\@B0Q J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@:]%>)QAxGDo@+ :E%x @B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P tJ U|>)JƊH(ϙ@~h1THQ J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@:%w0gS5køZi3~PwH)ΚA܌@,8tO%ˤP.YMxoKGyᒅiQpK:^#q \mBWUA)9 Gt5罡WF^uP+I8S}0'9϶/tU@J~ЇUio/tU2yR+MwR{۟VUAi4 ҕ]`cIo Z猪tUPjtuteiyA K֟`/tUUA1+c3Rj?~p-;.]텖#]rc=t+t70 t# l1._50媈f/mdM E.y!$~L'9YVT DtyDZX!.- M\C邖ٮtA) 4tGtŘ$?tU{n?v ʍtSK:GIn8'E6C_o 8&c0T7kRx2|YLs5Tf5QͧףirSZPG ~lc50q(2}^_o h=+%}x_ب% W%V NbeU09lyA N>wd)4ʰ+E0*R9uVZR,p51}q VrStf cN< JcN)U0;2+{Pju*(@:AR\+٧"]EW.#}-ktUP tut!0e ckC:OWHW'HWFp)h +ޟWؾդy%ŴS++a}Zj/Uսt%%3ЕٱV^5?~p%=.])Πك Ջ{SQr@LY}VnF^o*bG4]7-.p MZMXiiiIj} \)BW]+@i.nBWKѵQP'pQFvXbQNr{}tͤJ8*bfjQf <Ҳ`ˏ*#9iOt`M}AW$tJhVs&:̗n-:pq(YoS5nӅ=:vp,ɫ4wP[)y3!bTJpf*o!,QG/kNK^=>JWk vr GnwawX[Zy_x;h)n.p poF4'r>Z⩼N|*NJf4eynꇮ \l~zVn{bX3ƕ5WJqkyt& Eo E= NXS}"-'LH(W8WO_vNN dfo } {`E($F39RYLQ#)vQikȰ,X&E5]v bѬ0EW~lwWoc,6e2JUi]Wwn~*|zw=6 
-&HaXNdK}W5WlNw(x1>Wb޽.Zmk. IhI ;]9K `!#-` _.s<{=ۋ磡5 ֺ{O5w%o~Œ%o=L''[^^r_:̓Ÿ)x~&Gp3 ޼\~c8+`ǽj]מ3e>]:yXS,fwꝾ\Ie䮇]%n?4_ w{egT3G5wm>'(r1'4z"lNo 7EmЉ+3#rHэ(z0 qup}pQYMr2<+qzl2 oY!a)eY?t9_yzaPf+%R|4,K~$btW _-)|"a]|P/S<[2OLL{/bs .m=6l>NϷ]CcvNOVDR uU% Ww{-!ݎh_U#ZaѾ6PȦ(ȶTdYFOrDEVJi3K&B]FeA6Q>piԚBxK#DC 63sTLLZ - 58;+6PK0T0@7/r>./?'riU_Ÿ N+F&'E8h"MƄ9Sc RP*e<.+,pM~H鍡jLypa,7(M4sL&dg*Tkp uWe' KKYiVH9i<2K#Q+G8Fk `y›9O'#a~y5!xtE}v:fZ4,iVTSC9! _+߭+zMz28@Fio[1 B6*Z驆+8`y>{HHZ"%'@90c!WS$,NGgLUn-dkٕ8#sJ^;zG;INNݾ_G@/qڣX4/7zZ[JXg@ھ,hӴam61*{W7/ڶD|or*aoL>G&tI!8L+-L`p5w.no>?'-kGvO^i)/˗ {^w$Xn^s_ {QR=wɩ]TkثT/QS"EVV?%%&.CtR+eJgiF霩DzGsD?@?kխsҽ>Pl <+P[`ս]Ͷ0ãw$W N+?ƵpZژQ JDhED娔 ȓHD{+VDv[YDXwG",oRs#%9SR1-98,l !䘌.$YD kESv6` nWK3nO^J"c(^TgƤqH&ݼAe*OlCpo19hH ьK ΁ї:Cymy~Oz;4}n.f'/IQ𺬃S;j^K~`JXyʨ6s ţ6,J+7H6*"VʌcoeL̜2L0Rl=.Ep:Q*8W,͌yUZ,\ZB\\xs?Kfw{*7MnG'gl+!B13DB KJHjN"+)eU:cL267< h&W:(i$[ŚO iD{U*U>ԩcO 1 GZUuSdb0N"]A*}8bw6#v-/Q35jv`)`+2Hڰ5[ꐥlYm[ף8y9z(OD`ǕNG|F'd- } !nţ7EWf<e]MPQ2iؑrH>Ik(V$؉;NB]i-\w”t[}N=]mi6jouuJX9\Q箯]4Z?Ow1쨧M6yy*AACH֔)&.zUiYlSi#)0. uH( PRD@fb[EӾt|SpcY;"\M1"]6H Eb&+H]<׮ $OCl?X;RtIXF)>m-MAV4Hh,WToìp/8oa6c\oϴYcJ[ jKHMȉ̞tvJBcԦ7W1u"5.jĂ (#2rVګBdzG֑v))B*"l&:J&B'oH ೨%׸(FYϭ̂ ZG'xk:/Ӛ9Mk΍~LiX&yn&tN ssɗxF~bÏz?;bSMQzB? `gm2;XoWAX|~/96]QśKI~yzO6?P/)ܭ=g?g.OyQ^}\jbf_i6_rЄ8z,m Tol*yV5yчSWp$G| /=:UQlKmï wn/ 㦛tHjfoZhd`WnBI!ٵ.wZsD=t cضp̎ ǎ1I@& Z=7y Za2_ZSEG?.mGGZl8Ju//MM" 2IS\S5dxOivy/:}bݮ9_Eע \/0xUtS`pW_enGZV fUjC&%-9u_rK궄h~:z! =VdT(H!+EXr5SЇ F ^{F|>&%f)J& AX|;YJv :#b<>vx3MJصCT>1“뗰$Ť1lC푏M/})+]qnNJoկ iC ze> >'IO`G/|h~0]>~?3L𭨯Ǔ/OOӳ_jG =m%/?iyG[QQNi~1לiY= ro׆~6[hI"S:I8.2jc#o䡋]דy6ɫòݴ i hws]Gؾ~K+JǟZ\ n4:<5ld|qy2X }5_zO/fiSȞt"{ UQ!ƀHMQgr M|аZ.'z'_Y5|$魋Y:WAd("LN^)XJ!RB5tP[+PG`RUM0*FbcJL2z:.,I<GWuR R; D/ldM2_v~sg2:fJ:獊&'d":dT?mq6eO6[g̾^-n/YV~dZ;9v|GvnG/m2am aC^ThSNRh\YEe.n1c :RAig-BXI 8d,*` v6=x#x2ʶն7_={fN1oʹW򕘥4Feۄpl,O|gL*dl)Jj&,ugԀiǶ@;0B# Bkdp9j; ٕw " /vxz ߗ(^Ny荊S$Ot`=yrϝ8–[olxu1Žs )iN1"L9S%WRT L5sTd ;I޸9=?zLڞ¼䳕=Í6W j$S !M 'E{B"tRE⥋ŋ`Ksm+@6T71L5-B9&NUu$w]a7eUpye/ȧ}W?= vȀ lq.EIe!+H 7*,u|A/sz| jZN9]^5*YCkg+ܧS8-f:b7sA{4k~Qu 3>hSc &B.M P2*c|On@n䳽9+fHo풍(.6tw]o-CI9DIHE'FobN3,h eb 7gg)K7ɏ^A}}B hp}B5 o8 ֿ](߽[ϟ_FghyIMo6oKQ?E Y׭ s{lݚhנ'iҲZ9j^mtz:M"4}5g]$u!uJM %6&l8 #$tS~= ܱZOab=CIAj. YEF6RINDUuJTdP^:M*f !UZ("6CLـ9x+%S6j"Bld:CJ\PU4WY"B )BHb -*AGu5ed6`p6E WwRZ;x(,2",N25o6$1ĈIjO.$VqV -,QEaJQ$!MOf'[COy;./@T*CR:-%pޙmd  bqNwᷧe>/7pu+Ij5[_9v{kG.-7-71̯2̷2̯1̷1/3,_]\lh޶^}/iW^']-z˳+O٤?[JdIzHئ&}7L~x$My.E߶8yϋ{*+~W^O[j[ |͹!ŵ6p5IH.=/C;!|24 +KD%IomEqύ9+k%u'OJeAϖrf=k%>j/X# S쾛&퀰!^Ti !08Z|"<3E.j)lU\t;W^(Q kLʲr Y V1iu -Xl{ g2Fԋpb%L`ͽ2 gc%ՇՓ{ŶY([U>UeR͎dPMweOgeUgSABW:7x73IWyV.2͔SWAEey]D*dTXJE'j˧鯨UWEvO]^.wI&'F8-2]i,ܼϼӣQ}&.L׽!~1ʦGW't>9<7]olsW\T89Mo 6WqVv1]>$~Y}|O=ǒ<f'8ss]SUUY̤LKU"Gr[2'iUW7l1aϭ tl7C]3F {cixz?o`M{|~Xɗ.7vGBwO滃,G}|irX<;^Ͳo4;)]~N5Wcۻ {ȿM'UtqM<k{X$8rxMh7ώ'ofܠM Wv/go鬮AhcCvgסN׻Z-]zPY6?z5 _u@"FZܰ-' Ve<.Wn&=g`U=FU5GN6uq -ZmHxLIZNn@n/ޛ* )wyWGw,&n 'ZWW'lѹoKѪ[mjӞp>s=UYI]*]`K[i/evLq_feҙR6~uY^{zev} :MTuԣ5ޗ5F6s5QpcW9 j)\Gݏa:{okսk3KN <>m&Q{nQc\O*s\oO331YEhfJYq-2112H)DYf3VKe q+"t])A2mrz V3YUUF+xx_Z \5UT"UZs"ӕ*{e:f>]N2#+SU*^Wu3%w{X d?J҄!u!ke8!]tW *BZ|])]QW[) } q=5< ),cۻt_]!QdtVSz0u5@]9ݟPtSК~x+6j8ê"+vt+0+ )uljFtnj݈q]mW]5kS{gZ݇:S^+UWmIl LX3ɌpV_l)ݜK,!M#wd4 i]HTT5X0g 9q+5,t]!QWԕNxIHW832jp=]! )ړ56jѬQzQU=ӏlZҬҷ(]\PڧoC^\Mg<yqR f4̍f2OUM곲NqV. +g9#%gpGnrYוr:iqT|ܧ"/m.&%{n\!"3TB%{I R7#C PB`4!.TtB+QWCԕVJk q%]!&R~'ԕL='og`G z2A< )p dtL >t]!ekpt契R{KFW8+B@Ju|Ĥת;kߍnU7JXw1AWt]!QWԕ9!]!+UQ5f6j2N;M2Γz2N+H)ct5D]Yt' ד -:t]!䆨+y,!]Qt!&Ԏ6bHbw!{8%]!puåfi]RoGWrǬ8&oڱcaF2R:k+ XFWo7ZSTٍAvдksow}uFEWHkMB*QW+wt%!䚌Wh*BZBRju5@]I᭻)"q4lFӴƲ4!.5-;]N˃"&K)`q! 
i )[2o8fq+nWiZCZ|RzG] GW =dtRJʨ!2 +SWyƇ+tu8pJB` q hBJۮ+ < "+5dz" )ftvz5bI߿:+weynQ(5 +uu-CSS21V~,#,o{`RyBF'9Mb6tM#QԴB FHWl&+=twu^ۨ 4iu }Mc&aV$WR|n|B]wL"KeT l""N"WP*Fl CApB5B 5&75dt6:,R6D]iᙤ"9sdtQ5G7Ju5D]E=vNUu6jpKHWDWk+*t]!mWCԕwtBiB\MFWH낏Cԕ1&ҿ?]!pC<:2ŐuNQWuwz=bڻ۾*m_U7ZSgN"u]騫;ggu?3+k7f$F἟LnPZ2F\hh%3k)QgWW8]!nkw\+ mG.oҚ0Z#M0#MB\ȤKI_oS15:S t(]Ҧ&{mdzOu^=6$"<3E.j?­EJgKsehy IYVY!Bz)ի3{-f&4'O'#.g(y~͏^fˊ^tV׉`s(vMNzϮ)91NIK[(KezQ2cYQ;QȵI( #Rx:BOˇyzEoSQZw 7d+ԍVHc3J3!]!dtQѕҜIJkPZ+|FEW+ iUBJ[u+ 㖐XщWUCCRصiqRAvtzb"n"wL+A3B`HD\g h]_Ku>jFZC ;NGW+tFZA&ѕ1͈Xʬ}7wYWhGW( KWL՝snMKD@( ô:,otsUkCFӈk,M# ^HE5WiCHW+n^R.ʋ(^\z u[ӛe֌m6S &:H4[G O~U(q@{1P>=F6[aj~ſ[ ?Vml6i2M3|Z]$8=cjN qg4/fgPX6Z#1JßM6HD%^s4+&[.v8K_?*-cқC|fݜ~n&Gp7NIOP9쿼7fVyſmܭXt@E% ʛ/.˖0̂B[pm/ͷ%3נwīO1$Vw}֙~g@ͫ>='5b)`eBqL(W"RgjzH_!t]/ 0XsYa䧆E d˖&Eq=.Kk$RfdxUqtѵ,l!_`)T 3J1)0  ء> U}v_~;  uy;h'1o{%7SRg[sE(qLaj[ ɖ\bwSH:gr3mV\J9;^؈G0kGqiI ̿o+ +M(fNl?7ע{[2vXc4 mYcPM:͖JX*0Ȑ!KXh*qèic^[G |e| @27}LiM +QťEn(pJo@ezk|#cmF3YK.B@Fw=5(!Ȯ䄰w|͐jPo]?A6-ۙ0&X!aAYPPR4 &?xxfT*1a |k)6 AbςG ӄcb :Tj-T΄#72\Y+x[ t&yY {s6Y(J892z"XBUgMy'T),d,3P[ ()0Lw Zq&.R8ƸWU]%է^c9cIf`;ϳBEpZE((X(g! e 5P #O ڰM6}k?6qCh򲐁ό>HJԫy,ԥi&+f曎1*LۉFdL|cW_^l—*η]d>m.0mFL`-$,>&gAuPe0%A0m^%.fFVt 2:Df2X 0_*XX肺az+R|&Q*L&zZct^[`X,.-5Z30}fHHٹux4 ;< c֗ՅU,TG׷ßh{Bb߉lxe&[ _+H jҗa5Q~cw}v%R)\+`Kе2"ZwPrI<rbUo8"j]JX;2k ) `L:A jA *5hIVՌ+VF/ -QXyh`ȼ(يGy݅e6*I@̴dJ)C2?Amj Dao<{ 7K_ a2,BdXP uV 3v55UBJğ?4:>5M6%PYSxXP)ƃdzkJn,L퐄͸DྠGosCzZ/`y+w%@]mˋ~ǹ~:,90Lt!eO`3=8ؗ.> ;ZmQKǴjQ3y&4fex31g~y5$iUFQpݰ)a!/{I˩4ts ]a7vhoKu.0)j*:T2,0RK HςO(ZR~> `oݮ7ͰL6U ͅ+MN96ᓻ~1wU/PQ j #2PX;mQHtiFR<6,wuk[`r umJSlmuLGW19]n]UbxY;j׃*HLJF}32[T;3RP:OkWr׽G6&;/yM[pYP kbV`o#0h]| #+iq0-z|dNUŬ"8 NrC)cV-BLC=` \8 Q5[.ՁҘ&Dl0r;@gԊpn V3v KkB>]~FzwW*[P cQyOYry9 x8fCs'#lPϷ̯?œ{?2pAINm()L 0o?qg'7N.k} rz|~>Ot_./ߜ O<8?c؟0#g DMY3SKLT~n' n& N}(Y4 tIy~iH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 tI Lo-}&Ia@@IIcLygc$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I Mix@PfS %9$f@ޓ> )h@C,$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I Mo/ɖ@PfvLh}I 8 5 tI g&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@Ǔx}@㮽s'C Πyn#&`Gভ&ZKe .Cp韗T>,;sX|C -/_sq'/KzYe|yu5Jӷv7sRGBܽnoOX2oX;˯/Шu>4'@ &|N_ݤ ʮ?^!xNߜ{n{ߧ+|+ɟ^ȀoLNG5/z@IG@wmu]J$>v77r%O*H?<^`z0Ʌw_]ǰ~nxqi-j)=lbX]f+y*V>hS(kV./&;^gf~h䃙8Ƀ??..ܞFf jG?Gr<*'f>]YiLjD M*V%fNFQvs"c:; -FQ#QWHzhχ|v]Μ/vb}uƬbg 3VrCbળĆXX]a hWW=[r὇e?(k7fг0eFxE*8嚓HY 6Z ( RlĊH,^W_t4D{S(y̝vX-"ro0CJhE [0ׁR/AS а-0fKX>6IrZxn Fqmn)WĽO,_ -u.v0*c1/*k0ީ$F[rC Ĺ{J"؛M«\*)v1|4ʼn\h෋bA޼|gR(/G662.*I,#e`x4X[p' 8jIg辷5˶LsнO!(D3԰d;[e% u&b5RLxTv30-#z!U$)V[hA #(H5oLDcBgl)\4eq],J``9j z[?Nu~ h'km\V"OW_"v8ڪ  2p"!$GX zc7BF#R " Fxb^1:訜:EZr!M`z0Fq@YXKp3"اaLb XZPEIcX1tFC4s5('wEu49OͭoPcs3I)Ҭ`àZ3olC+1T,DzN N3 i&1488#3/|>ZUOy9[H c4!>iP?j& s"c,ӜXEW^]h D O@: v$ ; ̘0[5DEudžvW?0o__ };ƃe]ԮO>u~8Kzrg?=deqI%<(m Ά!RI ;ЂA}7+~x2/;VtBE97̮+Jͫ/^<{uRڜE[ -Aas[qWDс&3>0|ykBdQaӍQ)Ӯ T 1QnL=:1zn*i֛Mjua$lQw̺v/ vb;ّօO\UD@ G7%,Ho&y%=oՄ@8RF@D%ױ ;N{Z[PdpїRsDQY%H( Yo)uY6hSI `zO2>?`4Qq$OD]!h ssnoCZ`cg)QK6HÀaP)d[19ly/`M u9<4ӷI(rfjpzxy`8v++1Ysh/37\< ͳOwJSM3P;c~L;zs= gQؠ=`F)l1+ Z>iHl޾Bӂ=z~&nX9Zx} 5}6lZFSL@SOn]¾}crC3_L_x@nUE{d\Z~N$S&ɼ-e !g]3 ٌ@Qe6joo-mެs#`:~z[x$8ن jmgEʖƠMxK6QyD2GzCG_uX4qg0]x5z;^j慧o&=)@w@T)`9J$B koTR8CnޗnYޑŎ:8{'P< D={M erv" -(Ǩ F#L)j0DJ)'J8,jVnx㆛uPV崸DqqG|̊.Ooyet]]PI[AV^;#abeJX1XE6{wWT6p/Z*,\S-yQGZj4RioQ3k͹82eQh ȃ{I>5He (W$6!`B"aHve,sKȳ!jgcЦd#?k%C3[-?owQ\|bnzӟN!A rS"LX#VJ0aB21%4ѩpv`CNX B8x[ !!Gʍ^ywۨD9wuڐpŁ]3&=1<"-\1™gFJqrj]o\hC陏F?ڥVˈtNÚ" C a$1=s?YK=\nd7etne{?;niVl=;1"ޒ-΅>ȸ(8?.sp/1F P lP&qxR/HGn\tE޹/Y7iJ%0RjB*.@&+cԑCRXb g݂{pK͊B: .l  Ӿeokb7Sgew[-خuqà#-cr`1X6MG]$%-jHE>Ӓ)E>i Aҷ50VǛ&yq[Yw$=D-KiRayj'_DxrY1%2 
&zHHjYT}*>FEc~" lxx"< HEXJߩM]UX-zg՘IDk1h t9;n @-{e)4LayZn2^6zԠh3Ajt d:/ڬbNx"7V)˦&QsU~ d.&9erpHI:ɬ&=Ac/YNLFIQ0! GB\$:H1(L.s`U NP aGniاYŽu"X|)gU#j% 2E'Η4!~I>f+D?x.}r'-8?Q G$hV]io#Ir+x#8i2rfU &{𧼘o+9E.?Ӌ9Y]2LhkyD} ?Z5=s~nahǃx憃Y1Xq0[R@7 m#YCn+nLpƩk^$F=ti:o siVw 9.]bIfl WI-Wp7zg”Zi4돫(pPQ_Q_&{||φb돸ja8Ŀe!D65pK&9=[5+'>.ge7Ժx̀؃Jɹ -Q%ɌG\W 1 G^,&gsc`h.洊Z8sR @RŦ(]5<+S !Ih-KEKSս{\#~sz[SCdE dJ4tQK"}`g}nUlm vu䝸~Z ` r7wH7rdI*DQMHVY{SY/ě 9v9x!D um&D~z0'Q,M9yF!R16*pFxE JY)Y:=VoX5aѷt.-\|vz5J:8 HzߔʸE[A0I%9h+D9y < Q=E=ŚX{EQ.gbN 9|F3GocHd:>{lB=Ue!1BӀX XX Q貊8yDujcAǎh^&Z)X9E^\4:"$Dސ|O@9+$PZͯ:cs'6,ɠ~:pOӁt'=Ѐi pNF9+a1_yE2$yNYRgKg/qčY@"9$ZLb2ԑJqA /.1& s/ὡцt<,nP2a4ӝt/?{=Szߧ/${{J2yϞ1]fYaZfC3燪٨ Y4 Xx8 +UJ›d{5:RiXM2#u!ݬKBT!:eLG$}Oԧs m$tL7{Žijw'{LqR軾;`ՍmIxxmn1RoORO3|%>i>LgƓXZ })`dW[C`V.ywMzUM-ǓoFt6gU/~jhjy(ŵ 4~1ﬔ| g8uJ{w৅6*E&%i) mꡋKomt/yZ~Z:w=_sÂxa~]=~8[M/X6`tr2`ïjmeŢtdb2B<,D?3ޢ2;N8ܰ9?Ϯ x:10bJUyV1eL0u(ojw3)|,e{1,vNt(I>ȁi!u唔R߆6{3oգxVh.wӫtjQV~8GU$f)4# i5Q!iUʘrQI(=B̨1fYY, ILEt)ڜT@JǹW"4͌yfƴJc\ؘel˅a.T= ot]e|#PLMâ?rjx6rFE&$dB>3y,I+ћ^Ikԁd#U2(Ц&hM&ߎY̤!em:N)83⚋n:em0k{{Nq@^h&1b@֬@Q&8#C`!sfnm9O_!2RĂe&J?|*2RR6<̇S_6\Mb*۔}lˈaF=#xu*93b+oafB6)M!u!M!M+!}PV[.Xҍb.[!Ja-OqrTq]Qe-@x$3a@eHYrcr$-J%j@$ge62u.3lXY7瑁Bu1֦G}gB!0޳WƯ{_]pr1#gƃBv:.yXRKUelPLJ >\AA M'`i Ýx)Z5My[eG_6QNwݷLZYjq~Kwh9,̍F9x2ooMY5_YmK*͙0[B{9K%c/XXb=ff!2c>*’ie7\&X$ T# $MtQ8z-+2kd-tޅz{ }wOSϊႭ_r؛&3ePy6޿ٓ-oܿQ %T::ϳC#U@fVYTeDP-x;{1o/ m[}hm+,;x|.IX1MCQl$ CKHA/Co7s:1tu۔:]m˔$Ӫ${iua:'_ "z.5܏$(t\8!ft]uU6!?Mr+EsKď]?lHpQMo睇XG4n0.^n"BA@( +}\.hXl(jM)w4lK*80I3A,89BlSB{F d<ʒu}6 Fwg'R0|Wi՗m^|ۈuZ&PbsȾLՑK}e]m#Yѯ>ʀJ[w5vev6S$HXthڹ-F'Æ&8^&w|DZK<' JTΓ*`Q |TbvI:n-ݻRqb">Aۻ0{raJt]lug /PwBvV-/dWPr{VUSwպ+tUZv*(Qth#v `:CWPUA /\Pj+Q3]/6EW6]֚UA7HWH]ؚUq w 5U xȌb'~ih՞-j'tuӣ\ Z[\pJ\_rUNVBUM+0`Xc&d<_mqA*&@]߽sscdtkmWhmr=KdOoeU3tUUAktUPbOWo$Grv6e$}0u_WCzQx@'L ;3zpjJ-R b\\ZOK#3F]`ZlIZ"qa⯴lsן=jֳ&oܢ9FhJ_eD *X+J(gpXΈEW]RWPͯ?@V3tU;zih4{ztռK b.v ZSko4ҟ]RWXWtf%v*(׆]*t o6x\ݙvA,ϧҪ ]YT *pt ZmsWoP0!5Q1-<,k Nu%X牳 x0Oz&$:zjwu&u(_.;凖7-alpfs4x0lÃl`NΦ)>p-g"($2gH.}PmEq#I+q?$ء`dr$c i #KZQ7I=(dK<"cU_WW5+9*DoGKVt8ɫ$l0Γ'S2ǵJγo18G)-+0_P]FIu/5M _˓j궩mTP#W*E/pK }맥l@faU{\a nF#Ud-ĝ+OC0o1JVZ )S+vc90;sFpy3bDΓ{ J,3f*ۙ =AT"s4*sh5jJ(ig^H4`@\%5 rĚnJ:sVyb2 K?R^Ye,:u~ߋV 8W(Tp^w@c~1vmVxDZpWc`|rH8+:f5ϻQ~ \,IQN)93ZӍC6aV5!j )~P;RI!X""sLԝzJ`FnJnJpî>xs('\v+mZ J`-HpEk64+@qg^RX+ڦ X֘+1kJhY74ʚ3W/\?E*%Hs47՞P2ԙ?{=!;: >wtAp9:3: a(I> wC967)g)LD .ΜۍBL`Pkte[tB{ORL@3 A*`V p 2W -iJ(k1sҸS؂Zw?OP~]֮.$:l8'ivٚSb( ; lKp| Ah)"/d#/1gf6\%>@\%kՄiuꋘ+NmbW p6lnφmBuUBIqg^ xu6.0G5*% HhlJ(9 4W\%g?=05q nbPJԅýDs_cV=*e1W hKЙCKya(YsE:s׼]F!= h9`E,%QSԺݣUSxX\MͬH7ˁ+o.̬Ӊ/c&Uonws9[ Ms$`Dz/5_.]s_+ǐ&#B~ǸhvWFub]Aza&E8F  > >?^Ɠ9{s7v?M&U'Tv+>KLzϕanc_]|o.w1Mksjel-t3VO=OG͉EL KB[ô\´ZAδ¨cZ/iaq-`:RdWv JP8oGau#d9%Bθ ^mv5Ayv˿9bJX1XE©ܧWgU󺦕V~+/&{=ؐ>^-om =D~M7yvVάR~I㊪ gj6?="쮆`YseWO7 B+oʊB]jjU ɠiu$JEK$k͹82eQhL9^RƄq Z9C߷.-b!CC9(Emqm$\.%. 
var/home/core/zuul-output/logs/kubelet.log
Jan 27 19:09:56 crc systemd[1]: Starting Kubernetes Kubelet... Jan 27 19:09:56 crc restorecon[4688]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 
19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 19:09:56 crc 
restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 27 19:09:56 crc 
restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc 
restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc 
restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 19:09:56 
crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 
19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:56 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 27 19:09:57 crc 
restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 
19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 
19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc 
restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 19:09:57 crc restorecon[4688]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 27 19:09:57 crc restorecon[4688]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Jan 27 19:09:58 crc kubenswrapper[4776]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 27 19:09:58 crc kubenswrapper[4776]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Jan 27 19:09:58 crc kubenswrapper[4776]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 27 19:09:58 crc kubenswrapper[4776]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Jan 27 19:09:58 crc kubenswrapper[4776]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Jan 27 19:09:58 crc kubenswrapper[4776]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.011340 4776 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027704 4776 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027753 4776 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027784 4776 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027794 4776 feature_gate.go:330] unrecognized feature gate: OVNObservability
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027803 4776 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027811 4776 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027820 4776 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027828 4776 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027835 4776 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027842 4776 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027849 4776 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027856 4776 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027863 4776 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027869 4776 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027878 4776 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027884 4776 feature_gate.go:330] unrecognized feature gate: Example
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027891 4776 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027898 4776 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027906 4776 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027913 4776 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027919 4776 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027926 4776 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027933 4776 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027940 4776 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027946 4776 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027953 4776 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027962 4776 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027971 4776 feature_gate.go:330] unrecognized feature gate: NewOLM
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027978 4776 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027985 4776 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.027993 4776 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028000 4776 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028007 4776 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028014 4776 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028021 4776 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028028 4776 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028036 4776 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028043 4776 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028055 4776 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028064 4776 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028071 4776 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028081 4776 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028089 4776 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028096 4776 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028103 4776 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028109 4776 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028116 4776 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028123 4776 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028129 4776 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028136 4776 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028143 4776 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028149 4776 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028156 4776 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028162 4776 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028169 4776 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028176 4776 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028186 4776 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028194 4776 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028202 4776 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028208 4776 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028215 4776 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028221 4776 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028228 4776 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028235 4776 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028241 4776 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028248 4776 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028254 4776 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028260 4776 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028267 4776 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028273 4776 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.028281 4776 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029219 4776 flags.go:64] FLAG: --address="0.0.0.0" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029244 4776 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029256 4776 flags.go:64] FLAG: --anonymous-auth="true" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029267 4776 flags.go:64] FLAG: --application-metrics-count-limit="100" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029276 4776 flags.go:64] FLAG: --authentication-token-webhook="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029284 4776 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029295 4776 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029304 4776 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029314 4776 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029321 4776 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029330 4776 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029338 4776 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029346 4776 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jan 27 19:09:58 crc 
kubenswrapper[4776]: I0127 19:09:58.029354 4776 flags.go:64] FLAG: --cgroup-root="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029361 4776 flags.go:64] FLAG: --cgroups-per-qos="true" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029369 4776 flags.go:64] FLAG: --client-ca-file="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029377 4776 flags.go:64] FLAG: --cloud-config="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029384 4776 flags.go:64] FLAG: --cloud-provider="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029392 4776 flags.go:64] FLAG: --cluster-dns="[]" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029403 4776 flags.go:64] FLAG: --cluster-domain="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029410 4776 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029418 4776 flags.go:64] FLAG: --config-dir="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029426 4776 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029434 4776 flags.go:64] FLAG: --container-log-max-files="5" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029444 4776 flags.go:64] FLAG: --container-log-max-size="10Mi" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029451 4776 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029459 4776 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029467 4776 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029475 4776 flags.go:64] FLAG: --contention-profiling="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029483 4776 flags.go:64] FLAG: --cpu-cfs-quota="true" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029490 4776 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029499 4776 flags.go:64] FLAG: --cpu-manager-policy="none" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029507 4776 flags.go:64] FLAG: --cpu-manager-policy-options="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029521 4776 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029529 4776 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029536 4776 flags.go:64] FLAG: --enable-debugging-handlers="true" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029545 4776 flags.go:64] FLAG: --enable-load-reader="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029554 4776 flags.go:64] FLAG: --enable-server="true" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029561 4776 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029571 4776 flags.go:64] FLAG: --event-burst="100" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029579 4776 flags.go:64] FLAG: --event-qps="50" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029587 4776 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029594 4776 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 
19:09:58.029602 4776 flags.go:64] FLAG: --eviction-hard="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029611 4776 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029619 4776 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029626 4776 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029634 4776 flags.go:64] FLAG: --eviction-soft="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029642 4776 flags.go:64] FLAG: --eviction-soft-grace-period="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029649 4776 flags.go:64] FLAG: --exit-on-lock-contention="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029656 4776 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029664 4776 flags.go:64] FLAG: --experimental-mounter-path="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029671 4776 flags.go:64] FLAG: --fail-cgroupv1="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029678 4776 flags.go:64] FLAG: --fail-swap-on="true" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029688 4776 flags.go:64] FLAG: --feature-gates="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029697 4776 flags.go:64] FLAG: --file-check-frequency="20s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029705 4776 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029713 4776 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029720 4776 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029728 4776 flags.go:64] FLAG: --healthz-port="10248" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029736 4776 flags.go:64] FLAG: --help="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029744 4776 flags.go:64] FLAG: --hostname-override="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029751 4776 flags.go:64] FLAG: --housekeeping-interval="10s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029760 4776 flags.go:64] FLAG: --http-check-frequency="20s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029791 4776 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029799 4776 flags.go:64] FLAG: --image-credential-provider-config="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029807 4776 flags.go:64] FLAG: --image-gc-high-threshold="85" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029814 4776 flags.go:64] FLAG: --image-gc-low-threshold="80" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029822 4776 flags.go:64] FLAG: --image-service-endpoint="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029830 4776 flags.go:64] FLAG: --kernel-memcg-notification="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029837 4776 flags.go:64] FLAG: --kube-api-burst="100" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029845 4776 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.029853 4776 flags.go:64] FLAG: --kube-api-qps="50" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030053 
4776 flags.go:64] FLAG: --kube-reserved="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030061 4776 flags.go:64] FLAG: --kube-reserved-cgroup="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030068 4776 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030076 4776 flags.go:64] FLAG: --kubelet-cgroups="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030083 4776 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030090 4776 flags.go:64] FLAG: --lock-file="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030098 4776 flags.go:64] FLAG: --log-cadvisor-usage="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030105 4776 flags.go:64] FLAG: --log-flush-frequency="5s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030114 4776 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030126 4776 flags.go:64] FLAG: --log-json-split-stream="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030134 4776 flags.go:64] FLAG: --log-text-info-buffer-size="0" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030142 4776 flags.go:64] FLAG: --log-text-split-stream="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030150 4776 flags.go:64] FLAG: --logging-format="text" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030159 4776 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030167 4776 flags.go:64] FLAG: --make-iptables-util-chains="true" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030175 4776 flags.go:64] FLAG: --manifest-url="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030183 4776 flags.go:64] FLAG: --manifest-url-header="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030192 4776 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030202 4776 flags.go:64] FLAG: --max-open-files="1000000" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030211 4776 flags.go:64] FLAG: --max-pods="110" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030219 4776 flags.go:64] FLAG: --maximum-dead-containers="-1" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030233 4776 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030240 4776 flags.go:64] FLAG: --memory-manager-policy="None" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030247 4776 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030255 4776 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030263 4776 flags.go:64] FLAG: --node-ip="192.168.126.11" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030271 4776 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030289 4776 flags.go:64] FLAG: --node-status-max-images="50" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030296 4776 flags.go:64] FLAG: --node-status-update-frequency="10s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030304 4776 flags.go:64] FLAG: --oom-score-adj="-999" 
Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030312 4776 flags.go:64] FLAG: --pod-cidr="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030319 4776 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030331 4776 flags.go:64] FLAG: --pod-manifest-path="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030339 4776 flags.go:64] FLAG: --pod-max-pids="-1" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030347 4776 flags.go:64] FLAG: --pods-per-core="0" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030354 4776 flags.go:64] FLAG: --port="10250" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030363 4776 flags.go:64] FLAG: --protect-kernel-defaults="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030371 4776 flags.go:64] FLAG: --provider-id="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030378 4776 flags.go:64] FLAG: --qos-reserved="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030386 4776 flags.go:64] FLAG: --read-only-port="10255" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030394 4776 flags.go:64] FLAG: --register-node="true" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030402 4776 flags.go:64] FLAG: --register-schedulable="true" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030409 4776 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030423 4776 flags.go:64] FLAG: --registry-burst="10" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030431 4776 flags.go:64] FLAG: --registry-qps="5" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030439 4776 flags.go:64] FLAG: --reserved-cpus="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030446 4776 flags.go:64] FLAG: --reserved-memory="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030456 4776 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030464 4776 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030472 4776 flags.go:64] FLAG: --rotate-certificates="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030480 4776 flags.go:64] FLAG: --rotate-server-certificates="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030487 4776 flags.go:64] FLAG: --runonce="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030495 4776 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030503 4776 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030513 4776 flags.go:64] FLAG: --seccomp-default="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030521 4776 flags.go:64] FLAG: --serialize-image-pulls="true" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030529 4776 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030537 4776 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030545 4776 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030553 4776 flags.go:64] FLAG: 
--storage-driver-password="root" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030560 4776 flags.go:64] FLAG: --storage-driver-secure="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030568 4776 flags.go:64] FLAG: --storage-driver-table="stats" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030576 4776 flags.go:64] FLAG: --storage-driver-user="root" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030583 4776 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030591 4776 flags.go:64] FLAG: --sync-frequency="1m0s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030599 4776 flags.go:64] FLAG: --system-cgroups="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030607 4776 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030621 4776 flags.go:64] FLAG: --system-reserved-cgroup="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030628 4776 flags.go:64] FLAG: --tls-cert-file="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030636 4776 flags.go:64] FLAG: --tls-cipher-suites="[]" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030647 4776 flags.go:64] FLAG: --tls-min-version="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030654 4776 flags.go:64] FLAG: --tls-private-key-file="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030663 4776 flags.go:64] FLAG: --topology-manager-policy="none" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030670 4776 flags.go:64] FLAG: --topology-manager-policy-options="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030678 4776 flags.go:64] FLAG: --topology-manager-scope="container" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030686 4776 flags.go:64] FLAG: --v="2" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030697 4776 flags.go:64] FLAG: --version="false" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030708 4776 flags.go:64] FLAG: --vmodule="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030717 4776 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.030725 4776 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.030929 4776 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.030941 4776 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.030948 4776 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.030955 4776 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.030962 4776 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.030969 4776 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.030982 4776 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
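The run of flags.go:64 entries above is the kubelet's dump of its effective command-line flags, one FLAG: --name="value" pair per entry. When working with an archived log like this one it can be convenient to recover that flag set mechanically; the following is a small illustrative Go helper (not part of the kubelet; the FLAG format is assumed from the lines above) that scrapes FLAG entries from a log stream and prints each flag with the last value seen.

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// flagRe matches entries such as: flags.go:64] FLAG: --cgroup-driver="cgroupfs"
// (format taken from the dump above).
var flagRe = regexp.MustCompile(`FLAG: (--[\w-]+)="([^"]*)"`)

func main() {
	flags := map[string]string{}
	sc := bufio.NewScanner(os.Stdin)
	// Journal lines in this archive are very long, so raise the scanner limit.
	sc.Buffer(make([]byte, 0, 1024*1024), 4*1024*1024)
	for sc.Scan() {
		for _, m := range flagRe.FindAllStringSubmatch(sc.Text(), -1) {
			flags[m[1]] = m[2] // keep the last value seen for each flag
		}
	}
	if err := sc.Err(); err != nil {
		fmt.Fprintln(os.Stderr, "scan:", err)
		os.Exit(1)
	}
	for name, value := range flags {
		fmt.Printf("%s=%q\n", name, value)
	}
}

Run against the decompressed log (for example, go run dumpflags.go < kubelet.log, with a file name of your choosing), it reduces the wrapped journal output above to a flat flag=value listing.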
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.030991 4776 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031000 4776 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031007 4776 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031014 4776 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031022 4776 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031029 4776 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031036 4776 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031045 4776 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031052 4776 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031059 4776 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031068 4776 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031077 4776 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031084 4776 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031092 4776 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031098 4776 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031106 4776 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031113 4776 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031119 4776 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031126 4776 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031134 4776 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031141 4776 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031149 4776 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031155 4776 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031189 4776 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031196 4776 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 27 19:09:58 crc 
kubenswrapper[4776]: W0127 19:09:58.031203 4776 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031209 4776 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031216 4776 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031222 4776 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031228 4776 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031235 4776 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031244 4776 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031251 4776 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031260 4776 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031298 4776 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031305 4776 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031312 4776 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031320 4776 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031328 4776 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031335 4776 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031342 4776 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031350 4776 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031356 4776 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031362 4776 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031369 4776 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031375 4776 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031383 4776 feature_gate.go:330] unrecognized feature gate: Example Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031389 4776 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031396 4776 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031402 4776 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031408 4776 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 27 
19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031415 4776 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031422 4776 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031428 4776 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031434 4776 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031441 4776 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031447 4776 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031456 4776 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031465 4776 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031473 4776 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031482 4776 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031489 4776 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031497 4776 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.031507 4776 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.031529 4776 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.041866 4776 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.041904 4776 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042069 4776 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042084 4776 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042093 4776 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042102 4776 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042110 4776 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042118 4776 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 27 19:09:58 crc 
kubenswrapper[4776]: W0127 19:09:58.042126 4776 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042133 4776 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042141 4776 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042150 4776 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042159 4776 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042166 4776 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042175 4776 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042183 4776 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042191 4776 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042198 4776 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042206 4776 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042213 4776 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042222 4776 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042232 4776 feature_gate.go:330] unrecognized feature gate: Example Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042242 4776 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042250 4776 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042258 4776 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042265 4776 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042275 4776 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042283 4776 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042321 4776 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042329 4776 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042337 4776 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042345 4776 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042352 4776 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042361 4776 feature_gate.go:330] unrecognized feature gate: 
MultiArchInstallAzure Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042369 4776 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042377 4776 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042384 4776 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042395 4776 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042405 4776 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042414 4776 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042422 4776 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042430 4776 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042438 4776 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042445 4776 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042456 4776 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042469 4776 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042477 4776 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042486 4776 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042494 4776 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042502 4776 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042510 4776 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042519 4776 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042527 4776 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042535 4776 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042542 4776 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042550 4776 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042558 4776 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042566 4776 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042576 4776 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. 
It will be removed in a future release. Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042585 4776 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042594 4776 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042603 4776 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042613 4776 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042621 4776 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042629 4776 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042638 4776 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042646 4776 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042655 4776 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042665 4776 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042675 4776 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042683 4776 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042690 4776 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.042698 4776 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.042711 4776 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046336 4776 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046382 4776 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046393 4776 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046402 4776 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046411 4776 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046419 4776 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046427 4776 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 
19:09:58.046439 4776 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046450 4776 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046458 4776 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046466 4776 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046474 4776 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046482 4776 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046490 4776 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046497 4776 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046505 4776 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046512 4776 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046521 4776 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046529 4776 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046537 4776 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046544 4776 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046552 4776 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046560 4776 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046567 4776 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046575 4776 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046582 4776 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046590 4776 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046598 4776 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046606 4776 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046613 4776 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046621 4776 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046631 4776 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046640 4776 feature_gate.go:330] 
unrecognized feature gate: BuildCSIVolumes Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046649 4776 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046657 4776 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046665 4776 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046673 4776 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046680 4776 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046688 4776 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046699 4776 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046709 4776 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046718 4776 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046726 4776 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046734 4776 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046744 4776 feature_gate.go:330] unrecognized feature gate: Example Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046753 4776 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046793 4776 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046802 4776 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046810 4776 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046818 4776 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046825 4776 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046833 4776 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046841 4776 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046848 4776 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046859 4776 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046868 4776 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046876 4776 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046883 4776 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046891 4776 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046899 4776 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046907 4776 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046915 4776 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046923 4776 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046931 4776 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046939 4776 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046947 4776 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046955 4776 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046963 4776 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046975 4776 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046985 4776 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.046994 4776 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.047008 4776 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.048136 4776 server.go:940] "Client rotation is on, will bootstrap in background" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.054358 4776 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.054506 4776 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
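The repeated blocks of feature_gate.go:330 warnings above, each ending in an identical feature_gate.go:386 summary, come from the kubelet being handed a gate list that includes OpenShift cluster-level gates (GatewayAPI, PinnedImages, SignatureStores, and so on) it does not register itself: unknown names are warned about and skipped, recognized ones (CloudDualStackNodeIPs, KMSv1, ValidatingAdmissionPolicy, ...) are applied, and the set is evidently applied more than once during startup, which is why the same block and summary recur at .030, .042 and .046. The sketch below illustrates that merge-and-warn behaviour; it is hypothetical (gate names copied from the log, logic mine, not the real k8s.io/component-base/featuregate code).

package main

import "fmt"

// Illustrative only: the real logic lives in the feature_gate.go referenced in
// the log. Gate names are copied from the log above; the rest is a sketch.
func main() {
	// Gates this binary actually registers (a subset of the feature_gate.go:386 summary).
	known := map[string]bool{
		"CloudDualStackNodeIPs":                  false,
		"DisableKubeletCloudCredentialProviders": false,
		"KMSv1":                                  false,
		"ValidatingAdmissionPolicy":              false,
		"DynamicResourceAllocation":              false,
	}
	// Gates requested by the cluster configuration, including OpenShift-only names.
	requested := map[string]bool{
		"CloudDualStackNodeIPs":                  true,
		"DisableKubeletCloudCredentialProviders": true,
		"KMSv1":                                  true,
		"ValidatingAdmissionPolicy":              true,
		"GatewayAPI":                             true, // not registered here -> warning
		"PinnedImages":                           true, // not registered here -> warning
	}
	for name, enabled := range requested {
		if _, ok := known[name]; !ok {
			fmt.Printf("W unrecognized feature gate: %s\n", name)
			continue
		}
		known[name] = enabled
	}
	fmt.Printf("I feature gates: %v\n", known)
}

Running it prints one "unrecognized feature gate" line per unknown name and a final map resembling the feature_gate.go:386 summary (Go map iteration order is random, so ordering differs from run to run).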
Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.056669 4776 server.go:997] "Starting client certificate rotation" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.056711 4776 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.056937 4776 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-03 23:35:19.164234976 +0000 UTC Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.057040 4776 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.080254 4776 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.083442 4776 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.084528 4776 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.102692 4776 log.go:25] "Validated CRI v1 runtime API" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.135075 4776 log.go:25] "Validated CRI v1 image API" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.138087 4776 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.147352 4776 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-27-19-05-35-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.147403 4776 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}] Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.183891 4776 manager.go:217] Machine: {Timestamp:2026-01-27 19:09:58.179489609 +0000 UTC m=+0.685094796 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:9a4ff058-5a9d-4bfd-8fc3-20b41763605e BootID:0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9 Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 
Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:cf:62:57 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:cf:62:57 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:64:19:85 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:c6:f6:ac Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:12:60:d7 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:de:9c:ac Speed:-1 Mtu:1496} {Name:eth10 MacAddress:76:ca:a7:83:2b:5d Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:12:d1:0e:6f:a5:56 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] 
Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.184328 4776 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.184537 4776 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.185323 4776 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.185725 4776 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.185856 4776 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.187028 4776 topology_manager.go:138] "Creating topology manager with none policy" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.187071 4776 
container_manager_linux.go:303] "Creating device plugin manager" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.187860 4776 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.187908 4776 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.188841 4776 state_mem.go:36] "Initialized new in-memory state store" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.189004 4776 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.201972 4776 kubelet.go:418] "Attempting to sync node with API server" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.202024 4776 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.202053 4776 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.202077 4776 kubelet.go:324] "Adding apiserver pod source" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.202101 4776 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.206728 4776 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.206893 4776 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.206934 4776 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.207045 4776 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.209323 4776 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.210591 4776 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
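Several entries above (the certificate signing request at 19:09:58.084528 and the Service/Node reflector lists) fail with dial tcp 38.102.83.179:6443: connect: connection refused, meaning the kubelet has come up before the API server behind api-int.crc.testing:6443 is accepting connections and simply keeps retrying. A minimal, hypothetical Go probe of that wait-until-reachable pattern is sketched below; the kubelet itself retries through its client-go reflectors rather than an explicit loop like this.

package main

import (
	"fmt"
	"net"
	"time"
)

// waitForAPIServer dials the endpoint until a TCP connection succeeds,
// backing off between attempts. Purely illustrative.
func waitForAPIServer(addr string, maxWait time.Duration) error {
	deadline := time.Now().Add(maxWait)
	backoff := 500 * time.Millisecond
	for {
		conn, err := net.DialTimeout("tcp", addr, 2*time.Second)
		if err == nil {
			conn.Close()
			return nil
		}
		if time.Now().After(deadline) {
			return fmt.Errorf("giving up on %s: %w", addr, err)
		}
		fmt.Printf("still waiting for %s: %v\n", addr, err)
		time.Sleep(backoff)
		if backoff < 10*time.Second {
			backoff *= 2
		}
	}
}

func main() {
	// Endpoint taken from the log; adjust for other clusters.
	if err := waitForAPIServer("api-int.crc.testing:6443", 5*time.Minute); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("API server is reachable")
}

Against this log's endpoint it would keep printing "still waiting" lines until port 6443 starts accepting connections, mirroring the repeated connection-refused errors recorded here.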
Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.213063 4776 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.214838 4776 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.214888 4776 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.214909 4776 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.214927 4776 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.214957 4776 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.214974 4776 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.214998 4776 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.215026 4776 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.215047 4776 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.215065 4776 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.215090 4776 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.215108 4776 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.216013 4776 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.216863 4776 server.go:1280] "Started kubelet" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.217902 4776 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.217935 4776 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.218163 4776 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.218863 4776 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jan 27 19:09:58 crc systemd[1]: Started Kubernetes Kubelet. 
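
The block above records the kubelet starting its two local endpoints: the HTTPS server on 0.0.0.0:10250 and the podresources API on unix:/var/lib/kubelet/pod-resources/kubelet.sock, followed by systemd marking the unit as started. The Go sketch below simply dials both endpoints to confirm they are listening; the addresses come from the log, while the 3-second timeout and the assumption that this runs on the node itself with permission to open the unix socket (typically root) are assumptions of the sketch.

// listencheck.go: confirm the two listeners the kubelet reports opening above:
// the server port 10250 and the podresources gRPC unix socket.
// Minimal sketch intended to run on the node; it only checks reachability,
// it does not speak TLS or gRPC to either endpoint.
package main

import (
	"fmt"
	"net"
	"time"
)

func probe(network, addr string) {
	conn, err := net.DialTimeout(network, addr, 3*time.Second)
	if err != nil {
		fmt.Printf("%s %s: not reachable: %v\n", network, addr, err)
		return
	}
	conn.Close()
	fmt.Printf("%s %s: listening\n", network, addr)
}

func main() {
	probe("tcp", "127.0.0.1:10250")                              // kubelet server port from the log
	probe("unix", "/var/lib/kubelet/pod-resources/kubelet.sock") // podresources endpoint from the log
}

Both probes succeeding is consistent with the "Started kubelet" / "Starting to serve the podresources API" entries above, even while the API-server-facing calls in the surrounding log are still failing.
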
Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.222006 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.222184 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 02:55:06.265447154 +0000 UTC Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.222269 4776 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.223881 4776 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.223903 4776 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.224126 4776 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.224839 4776 server.go:460] "Adding debug handlers to kubelet server" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.224997 4776 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.226251 4776 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.226764 4776 factory.go:55] Registering systemd factory Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.227238 4776 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.227293 4776 factory.go:221] Registration of the systemd container factory successfully Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.227844 4776 factory.go:153] Registering CRI-O factory Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.227881 4776 factory.go:221] Registration of the crio container factory successfully Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.228004 4776 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.228046 4776 factory.go:103] Registering Raw factory Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.228080 4776 manager.go:1196] Started watching for new ooms in manager Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.228193 4776 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" interval="200ms" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.239980 4776 manager.go:319] Starting recovery of all containers Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.227729 4776 event.go:368] "Unable to write 
event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.179:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188eac2996340205 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-27 19:09:58.216811013 +0000 UTC m=+0.722416230,LastTimestamp:2026-01-27 19:09:58.216811013 +0000 UTC m=+0.722416230,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.246455 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.246797 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.246982 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.247159 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.247411 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.247595 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.247824 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.248077 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.248266 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.248443 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.248635 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.248829 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.249022 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.249200 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.249357 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.249519 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.249682 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.249918 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.250080 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.250233 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.250398 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.250557 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.250740 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.250964 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.251189 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.251399 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.251540 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.251714 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.251965 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.252174 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.252358 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.252527 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.252699 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.252932 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.253109 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.253268 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.253421 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.253585 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.253757 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.253969 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.254116 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.254280 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" 
volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.254444 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.254606 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.254854 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.255232 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.255466 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.255642 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.255857 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.256249 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.256430 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.256606 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.256846 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" 
volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.257001 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.257150 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.257317 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.257485 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.257639 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.257833 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.258231 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.258417 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.258568 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.258733 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.258949 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.259112 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.259312 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.259470 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.259629 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.259845 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.260033 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.260240 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.260420 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.260594 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.260847 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.261049 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.261226 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.261389 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.261524 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.261697 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.261922 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.262089 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.263675 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.263900 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.264065 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.264221 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.264375 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" 
volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.264546 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267152 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267191 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267206 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267218 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267231 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267248 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267263 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267295 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267307 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267320 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267333 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267345 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267356 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267368 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267382 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267394 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267405 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267427 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267442 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267455 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267471 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267486 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267500 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267515 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267529 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267543 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267557 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267571 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267586 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267607 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267619 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267632 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267645 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267660 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267673 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267686 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267700 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267712 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267726 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267740 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267756 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267791 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267804 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267817 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267829 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267842 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267855 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267868 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267880 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267894 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267907 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267904 4776 manager.go:324] Recovery completed Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.267919 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268134 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268184 4776 reconstruct.go:130] "Volume is marked as uncertain and added into 
the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268213 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268244 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268272 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268301 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268328 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268358 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268384 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268411 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268440 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268467 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268493 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268520 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268549 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268576 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268602 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268631 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268664 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268802 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268839 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268867 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268897 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268924 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268952 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.268978 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.269006 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.269033 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.269061 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.269081 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.269104 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.269125 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.269144 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.269164 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.269182 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.269207 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.269232 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.269261 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.271501 4776 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.271559 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.271594 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.271623 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272377 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272417 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272447 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272478 4776 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272506 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272533 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272559 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272586 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272610 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272637 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272660 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272684 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272708 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272732 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272757 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272815 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272842 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272869 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272895 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272920 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272949 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.272976 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.273005 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.273033 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.273061 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.273089 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.273115 4776 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.273140 4776 reconstruct.go:97] "Volume reconstruction finished" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.273160 4776 reconciler.go:26] "Reconciler: start to sync state" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.280364 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.282508 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.282554 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.282565 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.283614 4776 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.283632 4776 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.283655 4776 state_mem.go:36] "Initialized new in-memory state store" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.298524 4776 policy_none.go:49] "None policy: Start" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.300734 4776 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.300797 4776 state_mem.go:35] "Initializing new in-memory state store" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.315481 4776 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.317836 4776 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.317976 4776 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.318267 4776 kubelet.go:2335] "Starting kubelet main sync loop" Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.318492 4776 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.320944 4776 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.321039 4776 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.324636 4776 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.357624 4776 manager.go:334] "Starting Device Plugin manager" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.358264 4776 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.358295 4776 server.go:79] "Starting device plugin registration server" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.358814 4776 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.358837 4776 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.359005 4776 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.359100 4776 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.359114 4776 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.369820 4776 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.418666 4776 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.418794 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.420159 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.420194 4776 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.420204 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.420557 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.421242 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.421274 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.421543 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.421580 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.421593 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.421822 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.422137 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.422165 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.422179 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.422509 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.422596 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.422614 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.422639 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.422953 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.423065 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.423115 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.423320 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.426488 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.426524 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.426573 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.426572 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.426532 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.426638 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.426599 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.426638 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.426808 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.426927 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.427040 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.427085 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.428688 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.428734 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.428751 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.428841 4776 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" interval="400ms" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.428868 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.428952 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.429018 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.429038 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.429076 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.430079 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.430130 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.430149 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.460141 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.461265 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.461293 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.461305 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.461324 4776 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.461762 4776 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.179:6443: connect: 
connection refused" node="crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.476036 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.476074 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.476097 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.476120 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.476139 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.476163 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.476182 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.476201 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.476297 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 
19:09:58.476362 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.476406 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.476439 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.476468 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.577931 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578005 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578045 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578078 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578112 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578141 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578267 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578364 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578277 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578343 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578355 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578324 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578654 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578416 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578689 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578744 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578829 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578886 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578942 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578972 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578859 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578903 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.579026 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578755 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.579004 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.579077 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578960 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.578966 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.662181 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.664451 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.664501 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.664519 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.664552 4776 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.665110 4776 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.179:6443: connect: connection refused" node="crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.680448 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.680545 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.680875 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.680944 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.756551 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.771403 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.794087 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.800736 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: I0127 19:09:58.803893 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.813726 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-9a91451be84a1d62bfad78b7750a4ae29943dfacf8f705f34ed53a2357f69e8f WatchSource:0}: Error finding container 9a91451be84a1d62bfad78b7750a4ae29943dfacf8f705f34ed53a2357f69e8f: Status 404 returned error can't find the container with id 9a91451be84a1d62bfad78b7750a4ae29943dfacf8f705f34ed53a2357f69e8f Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.816639 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-d3f9dcd3ec5f79a9778007a0b720db4d3e2ecab085e1eb74a5b5b15ac234e1ff WatchSource:0}: Error finding container d3f9dcd3ec5f79a9778007a0b720db4d3e2ecab085e1eb74a5b5b15ac234e1ff: Status 404 returned error can't find the container with id d3f9dcd3ec5f79a9778007a0b720db4d3e2ecab085e1eb74a5b5b15ac234e1ff Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.826748 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-fcfe27f64dd981187e93780e4ee52d78e1f498f9b674c17f35a9db15a305f3ad WatchSource:0}: Error finding container fcfe27f64dd981187e93780e4ee52d78e1f498f9b674c17f35a9db15a305f3ad: Status 404 returned error can't find the container with id fcfe27f64dd981187e93780e4ee52d78e1f498f9b674c17f35a9db15a305f3ad Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 19:09:58.828996 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-80d853025f6ec3dcdf0887d70267b8aae25a976af610dfa546f56d089ca8dedb WatchSource:0}: Error finding container 80d853025f6ec3dcdf0887d70267b8aae25a976af610dfa546f56d089ca8dedb: Status 404 returned error can't find the container with id 80d853025f6ec3dcdf0887d70267b8aae25a976af610dfa546f56d089ca8dedb Jan 27 19:09:58 crc kubenswrapper[4776]: E0127 19:09:58.829424 4776 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" interval="800ms" Jan 27 19:09:58 crc kubenswrapper[4776]: W0127 
19:09:58.830436 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-396cd874aef4b531ec91c4b962f3a74761b8ce57a7c79e7ff58c9535b7092aee WatchSource:0}: Error finding container 396cd874aef4b531ec91c4b962f3a74761b8ce57a7c79e7ff58c9535b7092aee: Status 404 returned error can't find the container with id 396cd874aef4b531ec91c4b962f3a74761b8ce57a7c79e7ff58c9535b7092aee Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.066156 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.068173 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.068243 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.068262 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.068302 4776 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 19:09:59 crc kubenswrapper[4776]: E0127 19:09:59.069040 4776 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.179:6443: connect: connection refused" node="crc" Jan 27 19:09:59 crc kubenswrapper[4776]: W0127 19:09:59.187029 4776 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:09:59 crc kubenswrapper[4776]: E0127 19:09:59.187146 4776 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:09:59 crc kubenswrapper[4776]: W0127 19:09:59.213519 4776 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:09:59 crc kubenswrapper[4776]: E0127 19:09:59.213642 4776 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.219678 4776 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.222819 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 
2026-01-17 00:20:52.648664482 +0000 UTC Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.323080 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"80d853025f6ec3dcdf0887d70267b8aae25a976af610dfa546f56d089ca8dedb"} Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.324167 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"396cd874aef4b531ec91c4b962f3a74761b8ce57a7c79e7ff58c9535b7092aee"} Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.325254 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"fcfe27f64dd981187e93780e4ee52d78e1f498f9b674c17f35a9db15a305f3ad"} Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.326099 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d3f9dcd3ec5f79a9778007a0b720db4d3e2ecab085e1eb74a5b5b15ac234e1ff"} Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.326814 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"9a91451be84a1d62bfad78b7750a4ae29943dfacf8f705f34ed53a2357f69e8f"} Jan 27 19:09:59 crc kubenswrapper[4776]: W0127 19:09:59.426426 4776 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:09:59 crc kubenswrapper[4776]: E0127 19:09:59.426508 4776 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:09:59 crc kubenswrapper[4776]: E0127 19:09:59.630922 4776 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" interval="1.6s" Jan 27 19:09:59 crc kubenswrapper[4776]: W0127 19:09:59.843460 4776 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:09:59 crc kubenswrapper[4776]: E0127 19:09:59.843554 4776 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.870085 4776 kubelet_node_status.go:401] "Setting node annotation to 
enable volume controller attach/detach" Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.871374 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.871412 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.871422 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:09:59 crc kubenswrapper[4776]: I0127 19:09:59.871442 4776 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 19:09:59 crc kubenswrapper[4776]: E0127 19:09:59.871923 4776 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.179:6443: connect: connection refused" node="crc" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.219882 4776 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.223926 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 05:31:13.683880133 +0000 UTC Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.240680 4776 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 27 19:10:00 crc kubenswrapper[4776]: E0127 19:10:00.241920 4776 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.331629 4776 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="c88b752aeba6da3bdd28762050ee204e57afe42cb51ba418f815b14bf6605c0a" exitCode=0 Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.331716 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"c88b752aeba6da3bdd28762050ee204e57afe42cb51ba418f815b14bf6605c0a"} Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.331825 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.333163 4776 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f" exitCode=0 Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.333186 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f"} Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.333174 4776 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.333257 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.333264 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.333372 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.334290 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.334316 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.334324 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.335160 4776 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1" exitCode=0 Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.335195 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1"} Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.335253 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.336038 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.336082 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.336103 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.337842 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.338950 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.338980 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.339031 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.340383 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.340413 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e"} Jan 27 19:10:00 crc 
kubenswrapper[4776]: I0127 19:10:00.340434 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d"} Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.340451 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8"} Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.340460 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff"} Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.341244 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.341281 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.341296 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.342218 4776 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="db19095a7ff7828fa9cc2b415af7cc5092247b5d4ad739a121d537a0957af9d7" exitCode=0 Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.342261 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"db19095a7ff7828fa9cc2b415af7cc5092247b5d4ad739a121d537a0957af9d7"} Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.342346 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.343062 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.343085 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:00 crc kubenswrapper[4776]: I0127 19:10:00.343094 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:00 crc kubenswrapper[4776]: W0127 19:10:00.789921 4776 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:10:00 crc kubenswrapper[4776]: E0127 19:10:00.790000 4776 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.219144 4776 
csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.224274 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 05:44:44.091378323 +0000 UTC Jan 27 19:10:01 crc kubenswrapper[4776]: E0127 19:10:01.231835 4776 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" interval="3.2s" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.346298 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b"} Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.346339 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2"} Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.346350 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341"} Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.346359 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a"} Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.349039 4776 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5299d46ff1281594d3f3713a33020f1363641e49a84d8f93a7717610dd88c593" exitCode=0 Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.349089 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5299d46ff1281594d3f3713a33020f1363641e49a84d8f93a7717610dd88c593"} Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.349168 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.349990 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.350012 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.350020 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.352367 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"cdc59108ec466cb68643315caaee01cf58678b6234d01dfa49f24cdf41e5abf7"} Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.352412 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.353367 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.353395 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.353408 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.355487 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.355498 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c3f5c587739451d2571c4484c3fd6e635b5f584254d4b810339940c77bf5ac8b"} Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.355526 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a76216648fe603e922028f39eb9efb9ff18864054115c123f8bcea132c3db026"} Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.355537 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9d09799e92f789db0fd7077d34d99557d1f48b29ef6913525d73446dfac5942a"} Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.355564 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.356183 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.356211 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.356223 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.356497 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.356529 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.356636 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.472294 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.477389 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 
19:10:01.477425 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.477434 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.477457 4776 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 19:10:01 crc kubenswrapper[4776]: E0127 19:10:01.477941 4776 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.179:6443: connect: connection refused" node="crc" Jan 27 19:10:01 crc kubenswrapper[4776]: W0127 19:10:01.689334 4776 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:10:01 crc kubenswrapper[4776]: E0127 19:10:01.689428 4776 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:10:01 crc kubenswrapper[4776]: I0127 19:10:01.811859 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:10:01 crc kubenswrapper[4776]: W0127 19:10:01.868683 4776 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:10:01 crc kubenswrapper[4776]: E0127 19:10:01.868824 4776 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:10:01 crc kubenswrapper[4776]: W0127 19:10:01.908628 4776 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.179:6443: connect: connection refused Jan 27 19:10:01 crc kubenswrapper[4776]: E0127 19:10:01.908712 4776 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.179:6443: connect: connection refused" logger="UnhandledError" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.224493 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 19:04:15.38329644 +0000 UTC Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.361607 4776 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.363593 4776 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c" exitCode=255 Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.363709 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c"} Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.364335 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.365330 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.365404 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.365418 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.365934 4776 scope.go:117] "RemoveContainer" containerID="f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.366173 4776 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="97a100f3ec931c301f32f336ba9b84ef3ba7d5b5c6a1404006132e12722f2c19" exitCode=0 Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.366204 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"97a100f3ec931c301f32f336ba9b84ef3ba7d5b5c6a1404006132e12722f2c19"} Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.366270 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.366303 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.366316 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.366343 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.366680 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.369745 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.369816 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.369835 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.369742 4776 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.369918 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.369988 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.371503 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.371535 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.371548 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.371574 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.371599 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.371613 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.965053 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:10:02 crc kubenswrapper[4776]: I0127 19:10:02.996797 4776 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.225305 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 10:09:59.737111555 +0000 UTC Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.372033 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a840e3b140bb14b33a470d2764aaffa544df2c3ff3e1bbb9a699481f81993a03"} Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.372104 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fe9e33a2c91ff9dbaea38065a38c86568b10bb8b1a7952ae4e4506a41b12c4e6"} Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.372130 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d0a62fde7df7c815e25dc99aa3e8f48b8d8933dfd7a566e4b5c1de8a10951e66"} Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.372152 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8d6494fa9552fc0b9459d894cb4f1247e55a57b3d80b99397982aa5eabf1a75b"} Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.372173 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e8d6f965bd1c9edde3f5bc918674549601bff24282043262a99b9deff5f68ce3"} Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.372185 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.374083 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.374108 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.374145 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.374161 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.375901 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.376355 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.376626 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b"} Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.376902 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.376932 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.376943 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.376964 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.376991 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:03 crc kubenswrapper[4776]: I0127 19:10:03.377003 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.226149 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 21:02:25.910455371 +0000 UTC Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.378451 4776 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.378524 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.378475 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.380206 4776 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.380231 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.380242 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.380263 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.380297 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.380318 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.528314 4776 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.529538 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.678881 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.680610 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.680673 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.680691 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.680730 4776 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 19:10:04 crc kubenswrapper[4776]: I0127 19:10:04.804615 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Jan 27 19:10:05 crc kubenswrapper[4776]: I0127 19:10:05.226411 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 03:38:43.644239164 +0000 UTC Jan 27 19:10:05 crc kubenswrapper[4776]: I0127 19:10:05.234751 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:10:05 crc kubenswrapper[4776]: I0127 19:10:05.381022 4776 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 27 19:10:05 crc kubenswrapper[4776]: I0127 19:10:05.381070 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:05 crc kubenswrapper[4776]: I0127 19:10:05.381102 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:05 crc kubenswrapper[4776]: I0127 19:10:05.382106 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:05 crc kubenswrapper[4776]: I0127 19:10:05.382148 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:05 crc 
kubenswrapper[4776]: I0127 19:10:05.382163 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:05 crc kubenswrapper[4776]: I0127 19:10:05.382230 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:05 crc kubenswrapper[4776]: I0127 19:10:05.382276 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:05 crc kubenswrapper[4776]: I0127 19:10:05.382294 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:06 crc kubenswrapper[4776]: I0127 19:10:06.026332 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:10:06 crc kubenswrapper[4776]: I0127 19:10:06.026741 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:06 crc kubenswrapper[4776]: I0127 19:10:06.027983 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:06 crc kubenswrapper[4776]: I0127 19:10:06.028093 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:06 crc kubenswrapper[4776]: I0127 19:10:06.028157 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:06 crc kubenswrapper[4776]: I0127 19:10:06.226820 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 07:08:10.637172941 +0000 UTC Jan 27 19:10:06 crc kubenswrapper[4776]: I0127 19:10:06.383706 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:06 crc kubenswrapper[4776]: I0127 19:10:06.385042 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:06 crc kubenswrapper[4776]: I0127 19:10:06.385104 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:06 crc kubenswrapper[4776]: I0127 19:10:06.385130 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.121741 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.122029 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.123878 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.123942 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.123957 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.129508 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.228027 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 00:02:34.553613055 +0000 UTC Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.385516 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.386314 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.386351 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.386364 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.587171 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.587571 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.588688 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.588722 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:07 crc kubenswrapper[4776]: I0127 19:10:07.588734 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:08 crc kubenswrapper[4776]: I0127 19:10:08.228984 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 05:27:16.830773056 +0000 UTC Jan 27 19:10:08 crc kubenswrapper[4776]: E0127 19:10:08.369964 4776 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 27 19:10:08 crc kubenswrapper[4776]: I0127 19:10:08.404053 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:10:08 crc kubenswrapper[4776]: I0127 19:10:08.404304 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:08 crc kubenswrapper[4776]: I0127 19:10:08.405446 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:08 crc kubenswrapper[4776]: I0127 19:10:08.405484 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:08 crc kubenswrapper[4776]: I0127 19:10:08.405495 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:09 crc kubenswrapper[4776]: I0127 19:10:09.027208 4776 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" 
start-of-body= Jan 27 19:10:09 crc kubenswrapper[4776]: I0127 19:10:09.027338 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 27 19:10:09 crc kubenswrapper[4776]: I0127 19:10:09.229992 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 14:55:43.176592963 +0000 UTC Jan 27 19:10:10 crc kubenswrapper[4776]: I0127 19:10:10.231016 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 09:15:45.239072024 +0000 UTC Jan 27 19:10:11 crc kubenswrapper[4776]: I0127 19:10:11.231854 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 21:56:20.437926078 +0000 UTC Jan 27 19:10:11 crc kubenswrapper[4776]: I0127 19:10:11.819032 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:10:11 crc kubenswrapper[4776]: I0127 19:10:11.819221 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:11 crc kubenswrapper[4776]: I0127 19:10:11.820506 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:11 crc kubenswrapper[4776]: I0127 19:10:11.820556 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:11 crc kubenswrapper[4776]: I0127 19:10:11.820569 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:12 crc kubenswrapper[4776]: I0127 19:10:12.224491 4776 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Jan 27 19:10:12 crc kubenswrapper[4776]: I0127 19:10:12.232894 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 16:01:25.511251826 +0000 UTC Jan 27 19:10:12 crc kubenswrapper[4776]: I0127 19:10:12.832176 4776 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 27 19:10:12 crc kubenswrapper[4776]: I0127 19:10:12.832294 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 27 19:10:12 crc kubenswrapper[4776]: I0127 19:10:12.841298 4776 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup 
probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 27 19:10:12 crc kubenswrapper[4776]: I0127 19:10:12.841378 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 27 19:10:12 crc kubenswrapper[4776]: I0127 19:10:12.970251 4776 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]log ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]etcd ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/openshift.io-api-request-count-filter ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/openshift.io-startkubeinformers ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/openshift.io-openshift-apiserver-reachable ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/openshift.io-oauth-apiserver-reachable ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/start-apiserver-admission-initializer ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/generic-apiserver-start-informers ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/priority-and-fairness-config-consumer ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/priority-and-fairness-filter ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/storage-object-count-tracker-hook ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/start-apiextensions-informers ok Jan 27 19:10:12 crc kubenswrapper[4776]: [-]poststarthook/start-apiextensions-controllers failed: reason withheld Jan 27 19:10:12 crc kubenswrapper[4776]: [-]poststarthook/crd-informer-synced failed: reason withheld Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/start-system-namespaces-controller ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/start-cluster-authentication-info-controller ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/start-kube-apiserver-identity-lease-controller ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/start-kube-apiserver-identity-lease-garbage-collector ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/start-legacy-token-tracking-controller ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/start-service-ip-repair-controllers ok Jan 27 19:10:12 crc kubenswrapper[4776]: [-]poststarthook/rbac/bootstrap-roles failed: reason withheld Jan 27 19:10:12 crc kubenswrapper[4776]: [-]poststarthook/scheduling/bootstrap-system-priority-classes failed: reason withheld Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/priority-and-fairness-config-producer ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/bootstrap-controller ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/aggregator-reload-proxy-client-cert ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/start-kube-aggregator-informers ok Jan 27 19:10:12 crc kubenswrapper[4776]: 
[+]poststarthook/apiservice-status-local-available-controller ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/apiservice-status-remote-available-controller ok Jan 27 19:10:12 crc kubenswrapper[4776]: [-]poststarthook/apiservice-registration-controller failed: reason withheld Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/apiservice-wait-for-first-sync ok Jan 27 19:10:12 crc kubenswrapper[4776]: [-]poststarthook/apiservice-discovery-controller failed: reason withheld Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/kube-apiserver-autoregistration ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]autoregister-completion ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/apiservice-openapi-controller ok Jan 27 19:10:12 crc kubenswrapper[4776]: [+]poststarthook/apiservice-openapiv3-controller ok Jan 27 19:10:12 crc kubenswrapper[4776]: livez check failed Jan 27 19:10:12 crc kubenswrapper[4776]: I0127 19:10:12.970302 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:10:12 crc kubenswrapper[4776]: I0127 19:10:12.998055 4776 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 27 19:10:12 crc kubenswrapper[4776]: I0127 19:10:12.998141 4776 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 27 19:10:13 crc kubenswrapper[4776]: I0127 19:10:13.233828 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 05:45:59.237270755 +0000 UTC Jan 27 19:10:14 crc kubenswrapper[4776]: I0127 19:10:14.234780 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 06:42:51.472650237 +0000 UTC Jan 27 19:10:14 crc kubenswrapper[4776]: I0127 19:10:14.585536 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 27 19:10:14 crc kubenswrapper[4776]: I0127 19:10:14.585811 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:14 crc kubenswrapper[4776]: I0127 19:10:14.587158 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:14 crc kubenswrapper[4776]: I0127 19:10:14.587197 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:14 crc kubenswrapper[4776]: I0127 19:10:14.587210 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:14 crc kubenswrapper[4776]: I0127 19:10:14.615468 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 27 19:10:15 crc kubenswrapper[4776]: I0127 19:10:15.235951 4776 
certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 02:30:36.531879617 +0000 UTC Jan 27 19:10:15 crc kubenswrapper[4776]: I0127 19:10:15.404585 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:15 crc kubenswrapper[4776]: I0127 19:10:15.405517 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:15 crc kubenswrapper[4776]: I0127 19:10:15.405635 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:15 crc kubenswrapper[4776]: I0127 19:10:15.405654 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:16 crc kubenswrapper[4776]: I0127 19:10:16.236608 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 16:52:33.723379171 +0000 UTC Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.237088 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 01:22:55.863608577 +0000 UTC Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.588358 4776 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.588492 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 27 19:10:17 crc kubenswrapper[4776]: E0127 19:10:17.842839 4776 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.850336 4776 trace.go:236] Trace[92709757]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (27-Jan-2026 19:10:07.577) (total time: 10272ms): Jan 27 19:10:17 crc kubenswrapper[4776]: Trace[92709757]: ---"Objects listed" error: 10272ms (19:10:17.850) Jan 27 19:10:17 crc kubenswrapper[4776]: Trace[92709757]: [10.272524702s] [10.272524702s] END Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.850375 4776 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 27 19:10:17 crc kubenswrapper[4776]: E0127 19:10:17.851066 4776 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.857187 4776 trace.go:236] Trace[2017311669]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (27-Jan-2026 19:10:06.300) (total time: 11557ms): Jan 27 19:10:17 crc kubenswrapper[4776]: Trace[2017311669]: ---"Objects listed" 
error: 11556ms (19:10:17.856) Jan 27 19:10:17 crc kubenswrapper[4776]: Trace[2017311669]: [11.55702679s] [11.55702679s] END Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.857236 4776 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.857361 4776 trace.go:236] Trace[1592678064]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (27-Jan-2026 19:10:05.460) (total time: 12396ms): Jan 27 19:10:17 crc kubenswrapper[4776]: Trace[1592678064]: ---"Objects listed" error: 12396ms (19:10:17.857) Jan 27 19:10:17 crc kubenswrapper[4776]: Trace[1592678064]: [12.396947448s] [12.396947448s] END Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.857398 4776 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.857825 4776 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.867181 4776 trace.go:236] Trace[1775272272]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (27-Jan-2026 19:10:05.679) (total time: 12187ms): Jan 27 19:10:17 crc kubenswrapper[4776]: Trace[1775272272]: ---"Objects listed" error: 12186ms (19:10:17.866) Jan 27 19:10:17 crc kubenswrapper[4776]: Trace[1775272272]: [12.18723831s] [12.18723831s] END Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.867233 4776 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.869847 4776 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.973419 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:10:17 crc kubenswrapper[4776]: I0127 19:10:17.979527 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.217249 4776 apiserver.go:52] "Watching apiserver" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.221333 4776 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.221685 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.222127 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.222376 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.224464 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.224576 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.224630 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.224585 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.225182 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.225345 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.226060 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.227308 4776 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.224756 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.231810 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.231893 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.231976 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.231809 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.231831 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.232372 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.232426 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.232745 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.235396 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.237274 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.237329 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 10:00:22.005680621 +0000 UTC Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.241261 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.259399 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260268 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260303 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260330 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260351 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260371 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260395 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260415 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260437 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260460 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260480 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260506 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260530 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260554 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260572 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260589 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260608 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260624 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260650 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260667 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260685 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260702 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260717 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260731 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260747 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260762 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260811 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260832 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260848 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260865 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260880 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260898 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260915 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260929 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260947 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260968 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.260999 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261015 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261031 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261047 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261063 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261076 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261092 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261108 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261123 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261139 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261155 4776 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261175 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261189 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261204 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261219 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261234 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261247 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261262 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261278 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261293 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 
19:10:18.261307 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261321 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261336 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261352 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261385 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261409 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261423 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261440 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261455 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261471 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: 
\"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261487 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261504 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261520 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261541 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261566 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261592 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261613 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261634 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261667 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261691 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261708 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261724 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261740 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261755 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261802 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261825 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261842 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261861 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261876 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261894 4776 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261912 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261930 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261948 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261966 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.261984 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262001 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262020 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262037 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262052 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262068 
4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262082 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262096 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262110 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262126 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262142 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262159 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262174 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262189 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262204 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 
19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262220 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262236 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262252 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262270 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262285 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262301 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262316 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262333 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262349 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262365 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod 
\"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262382 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262398 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262413 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262428 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262444 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262459 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262475 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262563 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262582 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262598 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262612 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262627 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262641 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262656 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262672 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262687 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262704 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262721 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262737 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262752 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262824 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262843 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262863 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262878 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262893 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262909 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262924 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262943 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262961 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262978 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" 
(UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.262993 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263011 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263027 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263043 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263058 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263074 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263089 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263105 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263122 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263137 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" 
(UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263155 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263170 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263186 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263203 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263219 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263234 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263250 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263266 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263284 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263300 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263319 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263335 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263351 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263368 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263383 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263398 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263415 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263431 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263447 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 
19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263463 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263482 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263498 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263516 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263532 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263553 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263574 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263596 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263614 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263630 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263647 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263663 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263681 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263715 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263730 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263746 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263780 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263805 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263820 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263838 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" 
(UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263867 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263897 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263938 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.263979 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.264000 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.264017 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.264035 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.264053 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.264070 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod 
\"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.264091 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.264108 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.264126 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.264144 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.264161 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.264177 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.264194 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.265431 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.265673 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.265692 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.265895 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.266033 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.266187 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.266264 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.266335 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.266490 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.266635 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.266682 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.266801 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.267008 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.267213 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.267570 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.267794 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.267955 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.268315 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.268501 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.268593 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.268674 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.268693 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.268802 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.268883 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.268900 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.269130 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.269243 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.269362 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.269925 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.270058 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.270084 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.270327 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.270359 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.270531 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.270607 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.271368 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.271882 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.271992 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.272179 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.272313 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.272353 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.272571 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.272969 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.273101 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.273116 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.273285 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.273474 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.274567 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.274614 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.274716 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.274838 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.276924 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.276940 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.277044 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.277129 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.277262 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.277365 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.277544 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.277618 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.277778 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.277830 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). 
InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.277824 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.277992 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.278106 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.278134 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.278253 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:10:18.778236736 +0000 UTC m=+21.283841883 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.278264 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.278363 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.278717 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.279084 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.279297 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.279531 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.279808 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.279864 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.279958 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.280014 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.280204 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.280543 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.280551 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.280785 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.280824 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.280829 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.280939 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.280978 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.281012 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.281413 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.281562 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.281703 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.281802 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.281896 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.281913 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.282268 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.282301 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.282310 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.282371 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.284059 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.284118 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.284389 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.284456 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.284725 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.284949 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.285067 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.285018 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.285130 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.285172 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.285301 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.285382 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.285449 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.285557 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.285832 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.286017 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.286219 4776 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.286402 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.286474 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:18.786451125 +0000 UTC m=+21.292056452 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.286915 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.286975 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.287339 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.287656 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.287707 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.287867 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.288199 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.288412 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.288627 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.288731 4776 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.288819 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.289625 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.289727 4776 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.290385 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:18.790249264 +0000 UTC m=+21.295854451 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.290831 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.290905 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.291171 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.291370 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.289891 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.290074 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.292087 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.292359 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.293056 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.293079 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.293400 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.293609 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.293709 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.294027 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). 
InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.296247 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.297289 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.305686 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.306134 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.306488 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.306559 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.306630 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.307157 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.307488 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.307610 4776 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.307754 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.307224 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.307291 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.307439 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.308221 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.308355 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:18.808327982 +0000 UTC m=+21.313933129 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.308486 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.308578 4776 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.308673 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:18.808663953 +0000 UTC m=+21.314269090 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.308239 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.308986 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.308991 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.309067 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.309526 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.309783 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.309859 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.309994 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.310207 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.310275 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.310502 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.310724 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.311154 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.311224 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.311481 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.311891 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.312250 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.317391 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.322289 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.322729 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.323109 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.323151 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.323205 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.323238 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.323480 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.324363 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.324471 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.324701 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.325882 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.326377 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.326454 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.326936 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.329431 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.329804 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a8
73a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.330194 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.330388 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.330510 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.330516 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.330931 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.331097 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.331858 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.332099 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.332130 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.332147 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.332464 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.332497 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.332804 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.333420 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.333543 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.333670 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.335125 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.337437 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.337662 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.337936 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.338269 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.338612 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.340179 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.340523 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"
imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.340681 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.341900 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.343237 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.344574 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.345810 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.347246 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.347670 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.348043 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.349097 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.350022 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.351634 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.353473 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.353946 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.355213 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.355862 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.357177 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.357381 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.358188 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.358719 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.360204 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.360238 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.360992 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.361577 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.362031 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.363212 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.364728 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.364869 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.364932 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.364952 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.364970 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.364987 4776 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365000 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365013 4776 
reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365025 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365037 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365048 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365061 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365073 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365086 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365098 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365109 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365123 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365137 4776 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365150 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365161 4776 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath 
\"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365171 4776 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365181 4776 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365192 4776 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365241 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365295 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365336 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365384 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365420 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365459 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365482 4776 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365501 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365520 4776 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365538 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365557 4776 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365574 4776 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365591 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365607 4776 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365625 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365645 4776 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365662 4776 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365679 4776 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365698 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365716 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365733 4776 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365750 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365798 4776 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365815 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365873 4776 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365891 4776 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365909 4776 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365929 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365947 4776 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365964 4776 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365981 4776 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.365997 4776 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366015 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366032 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366049 4776 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366067 4776 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366085 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366102 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366119 4776 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366136 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366152 4776 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366170 4776 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366187 4776 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366203 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366221 4776 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366239 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366261 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366284 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366302 4776 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366320 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366342 4776 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366359 4776 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366375 4776 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366396 4776 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366420 4776 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366444 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366460 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366466 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.366489 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.380886 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.380940 4776 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.380961 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.380983 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381001 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381017 4776 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381036 4776 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381053 4776 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381069 4776 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") 
on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381141 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381159 4776 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381179 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381198 4776 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381215 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381233 4776 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381250 4776 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381267 4776 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381284 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381301 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381319 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381336 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381352 4776 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc 
kubenswrapper[4776]: I0127 19:10:18.381369 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381385 4776 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381403 4776 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381420 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381437 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381458 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381476 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381492 4776 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381508 4776 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381526 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381543 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381563 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381581 4776 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc 
kubenswrapper[4776]: I0127 19:10:18.381599 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381617 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381636 4776 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381653 4776 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381670 4776 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381693 4776 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381712 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381730 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381750 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381788 4776 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381805 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381823 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381840 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381858 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381879 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381899 4776 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381916 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381932 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381949 4776 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381969 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.381986 4776 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382001 4776 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382018 4776 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382035 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382053 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382070 4776 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath 
\"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382087 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382106 4776 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382127 4776 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382144 4776 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382162 4776 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382180 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382225 4776 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382243 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382259 4776 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382275 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382293 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382315 4776 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382336 4776 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" 
DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382357 4776 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382381 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382398 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382417 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382433 4776 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382450 4776 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382467 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382483 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382500 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382517 4776 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382535 4776 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382552 4776 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382568 4776 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 
19:10:18.382585 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382601 4776 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382618 4776 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382636 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382653 4776 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382671 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382688 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382706 4776 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382722 4776 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382739 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382756 4776 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382795 4776 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382812 4776 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382829 4776 reconciler_common.go:293] "Volume detached for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382847 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382864 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382881 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382900 4776 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382917 4776 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382934 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382955 4776 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382971 4776 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.382988 4776 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.383005 4776 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.383022 4776 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.383039 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.383056 4776 reconciler_common.go:293] 
"Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.383073 4776 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.383090 4776 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.383107 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.383125 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.383142 4776 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.391083 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.391608 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.392964 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.393488 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.394152 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.395149 4776 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.395281 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.398732 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.400723 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.401267 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.403185 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.404193 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.405582 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 27 19:10:18 crc 
kubenswrapper[4776]: I0127 19:10:18.405902 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.406432 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.407650 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.408436 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.409823 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.411062 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 27 19:10:18 
crc kubenswrapper[4776]: I0127 19:10:18.411940 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.412488 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.413072 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.414015 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.414855 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.415721 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.416023 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.416215 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.417200 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.417800 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.418509 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.419517 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.420368 4776 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.426827 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.436659 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 
19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\
" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.445138 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCoun
t\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.454563 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.465208 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.474980 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.484439 4776 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.545458 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.560032 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 27 19:10:18 crc kubenswrapper[4776]: W0127 19:10:18.561426 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-843cba19196dbaa861e3c643a499e7ba034bafccadc038596daf165a84252ba0 WatchSource:0}: Error finding container 843cba19196dbaa861e3c643a499e7ba034bafccadc038596daf165a84252ba0: Status 404 returned error can't find the container with id 843cba19196dbaa861e3c643a499e7ba034bafccadc038596daf165a84252ba0 Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.575269 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 27 19:10:18 crc kubenswrapper[4776]: W0127 19:10:18.588088 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-9d21031c1509a04b035a708650dd93f88c6860723fd36bfaf49497b1d2a1fe24 WatchSource:0}: Error finding container 9d21031c1509a04b035a708650dd93f88c6860723fd36bfaf49497b1d2a1fe24: Status 404 returned error can't find the container with id 9d21031c1509a04b035a708650dd93f88c6860723fd36bfaf49497b1d2a1fe24 Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.683578 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.705254 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.725590 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.736756 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.747073 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.757859 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.769478 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.779736 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.785870 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.786097 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:10:19.786063479 +0000 UTC m=+22.291668676 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.796314 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.887455 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.887533 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.887577 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:18 crc kubenswrapper[4776]: I0127 19:10:18.887618 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.887817 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.887848 4776 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.887895 4776 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.887887 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:18 crc 
kubenswrapper[4776]: E0127 19:10:18.887977 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.888003 4776 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.887938 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:19.887914079 +0000 UTC m=+22.393519266 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.887856 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.888119 4776 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.888099 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:19.888072244 +0000 UTC m=+22.393677451 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.888191 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:19.888162857 +0000 UTC m=+22.393768004 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:18 crc kubenswrapper[4776]: E0127 19:10:18.888223 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:19.888213509 +0000 UTC m=+22.393818656 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:19 crc kubenswrapper[4776]: I0127 19:10:19.238044 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 16:43:58.619084265 +0000 UTC Jan 27 19:10:19 crc kubenswrapper[4776]: I0127 19:10:19.319675 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:19 crc kubenswrapper[4776]: E0127 19:10:19.319941 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:19 crc kubenswrapper[4776]: I0127 19:10:19.418296 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9d21031c1509a04b035a708650dd93f88c6860723fd36bfaf49497b1d2a1fe24"} Jan 27 19:10:19 crc kubenswrapper[4776]: I0127 19:10:19.419618 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"192576e74986488cdf784a30daaf7af052252303a9a48030f686a02f8a172f2f"} Jan 27 19:10:19 crc kubenswrapper[4776]: I0127 19:10:19.420655 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"843cba19196dbaa861e3c643a499e7ba034bafccadc038596daf165a84252ba0"} Jan 27 19:10:19 crc kubenswrapper[4776]: I0127 19:10:19.794783 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:10:19 crc kubenswrapper[4776]: E0127 19:10:19.794945 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:10:21.794920973 +0000 UTC m=+24.300526110 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:10:19 crc kubenswrapper[4776]: I0127 19:10:19.895374 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:19 crc kubenswrapper[4776]: I0127 19:10:19.895447 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:19 crc kubenswrapper[4776]: I0127 19:10:19.895480 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:19 crc kubenswrapper[4776]: I0127 19:10:19.895506 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:19 crc kubenswrapper[4776]: E0127 19:10:19.895574 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:19 crc kubenswrapper[4776]: E0127 19:10:19.895610 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:19 crc kubenswrapper[4776]: E0127 19:10:19.895626 4776 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:19 crc kubenswrapper[4776]: E0127 19:10:19.895688 4776 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:19 crc kubenswrapper[4776]: E0127 19:10:19.895697 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:19 crc 
kubenswrapper[4776]: E0127 19:10:19.895739 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:19 crc kubenswrapper[4776]: E0127 19:10:19.895757 4776 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:19 crc kubenswrapper[4776]: E0127 19:10:19.895744 4776 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:19 crc kubenswrapper[4776]: E0127 19:10:19.895704 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:21.895681388 +0000 UTC m=+24.401286735 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:19 crc kubenswrapper[4776]: E0127 19:10:19.895951 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:21.895927606 +0000 UTC m=+24.401532783 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:19 crc kubenswrapper[4776]: E0127 19:10:19.895968 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:21.895960297 +0000 UTC m=+24.401565544 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:19 crc kubenswrapper[4776]: E0127 19:10:19.895990 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-01-27 19:10:21.895980437 +0000 UTC m=+24.401585684 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.238557 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 00:43:52.251017155 +0000 UTC Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.319091 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.319229 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:20 crc kubenswrapper[4776]: E0127 19:10:20.319622 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:20 crc kubenswrapper[4776]: E0127 19:10:20.319507 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.322852 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.425123 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a"} Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.425166 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b"} Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.426401 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f"} Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.436512 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.447712 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.457960 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"web
hook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.469479 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-
crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.480338 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.489655 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.497003 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.506208 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.517536 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\"
:\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.531143 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.538601 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.546576 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.555399 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.564896 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.574506 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:20 crc kubenswrapper[4776]: I0127 19:10:20.585934 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.238660 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 02:26:07.697191078 +0000 UTC Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.319301 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.319415 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.431904 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a"} Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.449681 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kub
ernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:21Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.471159 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:21Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.487516 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:21Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.500921 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:21Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.512583 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:21Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.526055 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:21Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.538871 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:21Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.552576 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:21Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.812123 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.812368 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:10:25.812326479 +0000 UTC m=+28.317931666 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.913027 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.913106 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.913160 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:21 crc kubenswrapper[4776]: I0127 19:10:21.913218 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.913293 4776 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.913327 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.913372 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.913387 4776 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.913409 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-01-27 19:10:25.913379373 +0000 UTC m=+28.418984560 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.913467 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:25.913440715 +0000 UTC m=+28.419045872 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.913337 4776 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.913517 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:25.913508688 +0000 UTC m=+28.419113845 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.913564 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.913578 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.913588 4776 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:21 crc kubenswrapper[4776]: E0127 19:10:21.913631 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:25.913618931 +0000 UTC m=+28.419224088 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:22 crc kubenswrapper[4776]: I0127 19:10:22.239285 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 15:55:53.763565905 +0000 UTC Jan 27 19:10:22 crc kubenswrapper[4776]: I0127 19:10:22.319635 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:22 crc kubenswrapper[4776]: I0127 19:10:22.319637 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:22 crc kubenswrapper[4776]: E0127 19:10:22.319790 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:22 crc kubenswrapper[4776]: E0127 19:10:22.320114 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:22 crc kubenswrapper[4776]: I0127 19:10:22.973594 4776 csr.go:261] certificate signing request csr-86vhb is approved, waiting to be issued Jan 27 19:10:22 crc kubenswrapper[4776]: I0127 19:10:22.986200 4776 csr.go:257] certificate signing request csr-86vhb is issued Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.003934 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-xnmft"] Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.004262 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-xnmft" Jan 27 19:10:23 crc kubenswrapper[4776]: W0127 19:10:23.007007 4776 reflector.go:561] object-"openshift-dns"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Jan 27 19:10:23 crc kubenswrapper[4776]: E0127 19:10:23.007053 4776 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 27 19:10:23 crc kubenswrapper[4776]: W0127 19:10:23.007109 4776 reflector.go:561] object-"openshift-dns"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Jan 27 19:10:23 crc kubenswrapper[4776]: E0127 19:10:23.007124 4776 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 27 19:10:23 crc kubenswrapper[4776]: W0127 19:10:23.007499 4776 reflector.go:561] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": failed to list *v1.Secret: secrets "node-resolver-dockercfg-kz9s7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-dns": no relationship found between node 'crc' and this object Jan 27 19:10:23 crc kubenswrapper[4776]: E0127 19:10:23.007532 4776 reflector.go:158] "Unhandled Error" err="object-\"openshift-dns\"/\"node-resolver-dockercfg-kz9s7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"node-resolver-dockercfg-kz9s7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-dns\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.024220 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.042638 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.052445 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.063572 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.075984 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.080936 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-qz4kb"] Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.081349 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-qz4kb" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.082818 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.083100 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.084912 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.084920 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.093061 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.104065 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.115733 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.123724 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf4st\" (UniqueName: \"kubernetes.io/projected/a6cc042e-9fd6-43f0-8655-72c53984b85e-kube-api-access-gf4st\") pod \"node-resolver-xnmft\" (UID: \"a6cc042e-9fd6-43f0-8655-72c53984b85e\") " pod="openshift-dns/node-resolver-xnmft" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.123821 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a6cc042e-9fd6-43f0-8655-72c53984b85e-hosts-file\") pod \"node-resolver-xnmft\" (UID: \"a6cc042e-9fd6-43f0-8655-72c53984b85e\") " pod="openshift-dns/node-resolver-xnmft" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.124364 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.135136 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.152891 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.181816 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.204733 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.218844 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.224410 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgkwp\" (UniqueName: \"kubernetes.io/projected/3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f-kube-api-access-hgkwp\") pod \"node-ca-qz4kb\" (UID: \"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\") " pod="openshift-image-registry/node-ca-qz4kb" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.224461 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf4st\" (UniqueName: \"kubernetes.io/projected/a6cc042e-9fd6-43f0-8655-72c53984b85e-kube-api-access-gf4st\") pod \"node-resolver-xnmft\" (UID: \"a6cc042e-9fd6-43f0-8655-72c53984b85e\") " pod="openshift-dns/node-resolver-xnmft" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.224492 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a6cc042e-9fd6-43f0-8655-72c53984b85e-hosts-file\") pod \"node-resolver-xnmft\" (UID: \"a6cc042e-9fd6-43f0-8655-72c53984b85e\") " pod="openshift-dns/node-resolver-xnmft" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.224517 4776 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f-serviceca\") pod \"node-ca-qz4kb\" (UID: \"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\") " pod="openshift-image-registry/node-ca-qz4kb" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.224533 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f-host\") pod \"node-ca-qz4kb\" (UID: \"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\") " pod="openshift-image-registry/node-ca-qz4kb" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.224590 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a6cc042e-9fd6-43f0-8655-72c53984b85e-hosts-file\") pod \"node-resolver-xnmft\" (UID: \"a6cc042e-9fd6-43f0-8655-72c53984b85e\") " pod="openshift-dns/node-resolver-xnmft" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.229209 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.240172 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 13:54:00.094726407 +0000 UTC Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.240643 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.254285 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.267437 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.280452 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.318541 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:23 crc kubenswrapper[4776]: E0127 19:10:23.318660 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.325725 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f-serviceca\") pod \"node-ca-qz4kb\" (UID: \"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\") " pod="openshift-image-registry/node-ca-qz4kb" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.325786 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f-host\") pod \"node-ca-qz4kb\" (UID: \"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\") " pod="openshift-image-registry/node-ca-qz4kb" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.325808 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgkwp\" (UniqueName: \"kubernetes.io/projected/3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f-kube-api-access-hgkwp\") pod \"node-ca-qz4kb\" (UID: \"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\") " pod="openshift-image-registry/node-ca-qz4kb" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.326844 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f-host\") pod \"node-ca-qz4kb\" (UID: \"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\") " pod="openshift-image-registry/node-ca-qz4kb" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.328456 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f-serviceca\") pod \"node-ca-qz4kb\" (UID: \"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\") " pod="openshift-image-registry/node-ca-qz4kb" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.348212 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgkwp\" (UniqueName: \"kubernetes.io/projected/3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f-kube-api-access-hgkwp\") pod \"node-ca-qz4kb\" (UID: \"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\") " pod="openshift-image-registry/node-ca-qz4kb" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.394545 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-qz4kb" Jan 27 19:10:23 crc kubenswrapper[4776]: W0127 19:10:23.404787 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fe3d3bb_f8f9_4b84_9b65_8d60cab75a2f.slice/crio-e2daa62519fc6f94b8c9d9973847d68f8c87a6b72f7c0c34df4815df097a3e5d WatchSource:0}: Error finding container e2daa62519fc6f94b8c9d9973847d68f8c87a6b72f7c0c34df4815df097a3e5d: Status 404 returned error can't find the container with id e2daa62519fc6f94b8c9d9973847d68f8c87a6b72f7c0c34df4815df097a3e5d Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.437538 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-qz4kb" event={"ID":"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f","Type":"ContainerStarted","Data":"e2daa62519fc6f94b8c9d9973847d68f8c87a6b72f7c0c34df4815df097a3e5d"} Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.863275 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-z2w9t"] Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.864264 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.867078 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.867551 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-wmmtr"] Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.867954 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-wmmtr" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.868013 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.868089 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.868285 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.868324 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.868693 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-d7d6j"] Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.869341 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-hbtwl"] Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.869625 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.869849 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.870447 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.870638 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.871211 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.871360 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.871754 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.872663 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.873220 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.873793 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.874439 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.874518 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.874615 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.874731 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.874937 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.877428 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.894021 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.912843 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.928603 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.931699 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blcnp\" (UniqueName: \"kubernetes.io/projected/7ebf810d-762c-445f-9705-a74d274fcf1b-kube-api-access-blcnp\") pod \"machine-config-daemon-z2w9t\" (UID: \"7ebf810d-762c-445f-9705-a74d274fcf1b\") " pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.931755 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7ebf810d-762c-445f-9705-a74d274fcf1b-proxy-tls\") pod \"machine-config-daemon-z2w9t\" (UID: \"7ebf810d-762c-445f-9705-a74d274fcf1b\") " 
pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.931819 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7ebf810d-762c-445f-9705-a74d274fcf1b-mcd-auth-proxy-config\") pod \"machine-config-daemon-z2w9t\" (UID: \"7ebf810d-762c-445f-9705-a74d274fcf1b\") " pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.931929 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/7ebf810d-762c-445f-9705-a74d274fcf1b-rootfs\") pod \"machine-config-daemon-z2w9t\" (UID: \"7ebf810d-762c-445f-9705-a74d274fcf1b\") " pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.942753 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-di
r\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.953041 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.962792 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.972632 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.981267 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.987415 4776 certificate_manager.go:356] 
kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-27 19:05:22 +0000 UTC, rotation deadline is 2026-12-09 09:28:27.744040447 +0000 UTC Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.987471 4776 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7574h18m3.756572997s for next certificate rotation Jan 27 19:10:23 crc kubenswrapper[4776]: I0127 19:10:23.991550 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:23Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.002274 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.010643 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.020310 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.031506 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.032804 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6e047604-a155-4166-9ab0-0577cbe70bb6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.032849 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-var-lib-kubelet\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.032873 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-multus-conf-dir\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.032919 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/7ebf810d-762c-445f-9705-a74d274fcf1b-rootfs\") pod \"machine-config-daemon-z2w9t\" (UID: \"7ebf810d-762c-445f-9705-a74d274fcf1b\") " pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.032963 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/7ebf810d-762c-445f-9705-a74d274fcf1b-rootfs\") pod \"machine-config-daemon-z2w9t\" (UID: \"7ebf810d-762c-445f-9705-a74d274fcf1b\") " pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033000 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0acf82d2-cb56-42c0-9762-895319b4c26c-ovnkube-script-lib\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033031 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wgnn\" (UniqueName: \"kubernetes.io/projected/255485b5-2151-4328-b925-c2e1a1f95e1f-kube-api-access-4wgnn\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033057 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blcnp\" (UniqueName: \"kubernetes.io/projected/7ebf810d-762c-445f-9705-a74d274fcf1b-kube-api-access-blcnp\") pod \"machine-config-daemon-z2w9t\" (UID: \"7ebf810d-762c-445f-9705-a74d274fcf1b\") " pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033083 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-run-ovn\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033103 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-cni-bin\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033123 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-var-lib-cni-multus\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033166 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6e047604-a155-4166-9ab0-0577cbe70bb6-system-cni-dir\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033210 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6e047604-a155-4166-9ab0-0577cbe70bb6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033259 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6e047604-a155-4166-9ab0-0577cbe70bb6-cnibin\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033280 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/255485b5-2151-4328-b925-c2e1a1f95e1f-multus-daemon-config\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033297 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-run-netns\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033314 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0acf82d2-cb56-42c0-9762-895319b4c26c-ovn-node-metrics-cert\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033328 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6e047604-a155-4166-9ab0-0577cbe70bb6-cni-binary-copy\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033368 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6e047604-a155-4166-9ab0-0577cbe70bb6-os-release\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033390 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/255485b5-2151-4328-b925-c2e1a1f95e1f-cni-binary-copy\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " 
pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033407 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-multus-socket-dir-parent\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033424 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-run-netns\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033460 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7ebf810d-762c-445f-9705-a74d274fcf1b-mcd-auth-proxy-config\") pod \"machine-config-daemon-z2w9t\" (UID: \"7ebf810d-762c-445f-9705-a74d274fcf1b\") " pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033538 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-slash\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033578 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmbpv\" (UniqueName: \"kubernetes.io/projected/6e047604-a155-4166-9ab0-0577cbe70bb6-kube-api-access-hmbpv\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033607 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-etc-kubernetes\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033632 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-run-systemd\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033657 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-run-k8s-cni-cncf-io\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033694 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-run-ovn-kubernetes\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033733 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-multus-cni-dir\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033787 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7dhf\" (UniqueName: \"kubernetes.io/projected/0acf82d2-cb56-42c0-9762-895319b4c26c-kube-api-access-k7dhf\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033835 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-systemd-units\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033859 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-var-lib-openvswitch\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033878 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-etc-openvswitch\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033928 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7ebf810d-762c-445f-9705-a74d274fcf1b-proxy-tls\") pod \"machine-config-daemon-z2w9t\" (UID: \"7ebf810d-762c-445f-9705-a74d274fcf1b\") " pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033951 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-node-log\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033973 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-cni-netd\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.033995 4776 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.034020 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0acf82d2-cb56-42c0-9762-895319b4c26c-ovnkube-config\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.034041 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-cnibin\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.034060 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-os-release\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.034083 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-var-lib-cni-bin\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.034250 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0acf82d2-cb56-42c0-9762-895319b4c26c-env-overrides\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.034278 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7ebf810d-762c-445f-9705-a74d274fcf1b-mcd-auth-proxy-config\") pod \"machine-config-daemon-z2w9t\" (UID: \"7ebf810d-762c-445f-9705-a74d274fcf1b\") " pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.034320 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-kubelet\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.034357 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-log-socket\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: 
I0127 19:10:24.034382 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-run-multus-certs\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.034430 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-hostroot\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.034457 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-run-openvswitch\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.034475 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-system-cni-dir\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.039266 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7ebf810d-762c-445f-9705-a74d274fcf1b-proxy-tls\") pod \"machine-config-daemon-z2w9t\" (UID: \"7ebf810d-762c-445f-9705-a74d274fcf1b\") " pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.048011 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.059981 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blcnp\" (UniqueName: \"kubernetes.io/projected/7ebf810d-762c-445f-9705-a74d274fcf1b-kube-api-access-blcnp\") pod \"machine-config-daemon-z2w9t\" (UID: \"7ebf810d-762c-445f-9705-a74d274fcf1b\") " pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.066876 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with 
unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.089352 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.105585 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.120239 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.130848 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135525 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-node-log\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135564 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-cni-netd\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135582 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135604 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0acf82d2-cb56-42c0-9762-895319b4c26c-ovnkube-config\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135624 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0acf82d2-cb56-42c0-9762-895319b4c26c-env-overrides\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135643 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-cnibin\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135653 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-cni-netd\") pod \"ovnkube-node-d7d6j\" (UID: 
\"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135663 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-os-release\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135686 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135711 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-var-lib-cni-bin\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135742 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-kubelet\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135759 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-log-socket\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135785 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-node-log\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135826 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-cnibin\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135857 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-kubelet\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135819 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-hostroot\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135838 4776 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-var-lib-cni-bin\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135797 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-hostroot\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135918 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-run-multus-certs\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135940 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-os-release\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135862 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-log-socket\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135958 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-system-cni-dir\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135984 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-run-multus-certs\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.135985 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-run-openvswitch\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136008 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0acf82d2-cb56-42c0-9762-895319b4c26c-ovnkube-script-lib\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136030 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-run-openvswitch\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136036 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6e047604-a155-4166-9ab0-0577cbe70bb6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136064 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-var-lib-kubelet\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136088 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-multus-conf-dir\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136123 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wgnn\" (UniqueName: \"kubernetes.io/projected/255485b5-2151-4328-b925-c2e1a1f95e1f-kube-api-access-4wgnn\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136144 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-run-ovn\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136163 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-cni-bin\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136182 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-var-lib-cni-multus\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136201 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6e047604-a155-4166-9ab0-0577cbe70bb6-system-cni-dir\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136222 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6e047604-a155-4166-9ab0-0577cbe70bb6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 
19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136242 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/255485b5-2151-4328-b925-c2e1a1f95e1f-multus-daemon-config\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136264 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6e047604-a155-4166-9ab0-0577cbe70bb6-cnibin\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136287 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0acf82d2-cb56-42c0-9762-895319b4c26c-env-overrides\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136285 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-run-netns\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136351 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-var-lib-kubelet\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136067 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-system-cni-dir\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136373 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0acf82d2-cb56-42c0-9762-895319b4c26c-ovn-node-metrics-cert\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136394 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6e047604-a155-4166-9ab0-0577cbe70bb6-cni-binary-copy\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136416 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6e047604-a155-4166-9ab0-0577cbe70bb6-os-release\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136438 4776 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-hmbpv\" (UniqueName: \"kubernetes.io/projected/6e047604-a155-4166-9ab0-0577cbe70bb6-kube-api-access-hmbpv\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136459 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/255485b5-2151-4328-b925-c2e1a1f95e1f-cni-binary-copy\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136477 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-multus-socket-dir-parent\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136498 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-run-netns\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136519 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-slash\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136537 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-etc-kubernetes\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136557 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-run-systemd\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136564 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-run-ovn\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136563 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0acf82d2-cb56-42c0-9762-895319b4c26c-ovnkube-config\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136627 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/6e047604-a155-4166-9ab0-0577cbe70bb6-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136385 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-multus-conf-dir\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136661 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-run-k8s-cni-cncf-io\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136668 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-multus-socket-dir-parent\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136577 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-run-k8s-cni-cncf-io\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136698 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-run-ovn-kubernetes\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136701 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6e047604-a155-4166-9ab0-0577cbe70bb6-os-release\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136703 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-run-netns\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136713 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-multus-cni-dir\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136741 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-etc-openvswitch\") pod \"ovnkube-node-d7d6j\" (UID: 
\"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136787 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-run-ovn-kubernetes\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136786 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7dhf\" (UniqueName: \"kubernetes.io/projected/0acf82d2-cb56-42c0-9762-895319b4c26c-kube-api-access-k7dhf\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136816 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-multus-cni-dir\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136847 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-systemd-units\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136855 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6e047604-a155-4166-9ab0-0577cbe70bb6-system-cni-dir\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136871 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-var-lib-openvswitch\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136885 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-host-var-lib-cni-multus\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136928 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-run-netns\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136963 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6e047604-a155-4166-9ab0-0577cbe70bb6-cnibin\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " 
pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.137010 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-etc-openvswitch\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.137034 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6e047604-a155-4166-9ab0-0577cbe70bb6-cni-binary-copy\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136336 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-cni-bin\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.137062 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-host-slash\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.137098 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/255485b5-2151-4328-b925-c2e1a1f95e1f-etc-kubernetes\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.137128 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-run-systemd\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.136740 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0acf82d2-cb56-42c0-9762-895319b4c26c-ovnkube-script-lib\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.137335 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/255485b5-2151-4328-b925-c2e1a1f95e1f-multus-daemon-config\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.137360 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-systemd-units\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.137381 4776 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0acf82d2-cb56-42c0-9762-895319b4c26c-var-lib-openvswitch\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.137460 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/255485b5-2151-4328-b925-c2e1a1f95e1f-cni-binary-copy\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.137719 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6e047604-a155-4166-9ab0-0577cbe70bb6-tuning-conf-dir\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.140114 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0acf82d2-cb56-42c0-9762-895319b4c26c-ovn-node-metrics-cert\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.144510 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.151072 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wgnn\" (UniqueName: \"kubernetes.io/projected/255485b5-2151-4328-b925-c2e1a1f95e1f-kube-api-access-4wgnn\") pod \"multus-wmmtr\" (UID: \"255485b5-2151-4328-b925-c2e1a1f95e1f\") " pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.151620 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7dhf\" (UniqueName: \"kubernetes.io/projected/0acf82d2-cb56-42c0-9762-895319b4c26c-kube-api-access-k7dhf\") pod \"ovnkube-node-d7d6j\" (UID: \"0acf82d2-cb56-42c0-9762-895319b4c26c\") " pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 
19:10:24.152237 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmbpv\" (UniqueName: \"kubernetes.io/projected/6e047604-a155-4166-9ab0-0577cbe70bb6-kube-api-access-hmbpv\") pod \"multus-additional-cni-plugins-hbtwl\" (UID: \"6e047604-a155-4166-9ab0-0577cbe70bb6\") " pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.156554 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.167316 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.178586 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.181440 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:10:24 crc kubenswrapper[4776]: W0127 19:10:24.190040 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ebf810d_762c_445f_9705_a74d274fcf1b.slice/crio-d6ead0cd28283b7ed1910157b9fa49ee3ea4815b940eaa147b50f63241444ba2 WatchSource:0}: Error finding container d6ead0cd28283b7ed1910157b9fa49ee3ea4815b940eaa147b50f63241444ba2: Status 404 returned error can't find the container with id d6ead0cd28283b7ed1910157b9fa49ee3ea4815b940eaa147b50f63241444ba2 Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.191860 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-wmmtr" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.193261 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\"
:false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID
\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.198527 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.207290 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.210408 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d
19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: W0127 19:10:24.219434 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0acf82d2_cb56_42c0_9762_895319b4c26c.slice/crio-171fa5f81809762839d5a415c2a36c355fac9a5b618706a0b3d285b90f3a8060 WatchSource:0}: Error finding container 171fa5f81809762839d5a415c2a36c355fac9a5b618706a0b3d285b90f3a8060: Status 404 returned error can't find the container with id 171fa5f81809762839d5a415c2a36c355fac9a5b618706a0b3d285b90f3a8060 Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.229114 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 27 19:10:24 crc kubenswrapper[4776]: E0127 19:10:24.238172 4776 projected.go:288] Couldn't get configMap 
openshift-dns/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Jan 27 19:10:24 crc kubenswrapper[4776]: E0127 19:10:24.238207 4776 projected.go:194] Error preparing data for projected volume kube-api-access-gf4st for pod openshift-dns/node-resolver-xnmft: failed to sync configmap cache: timed out waiting for the condition Jan 27 19:10:24 crc kubenswrapper[4776]: E0127 19:10:24.238257 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a6cc042e-9fd6-43f0-8655-72c53984b85e-kube-api-access-gf4st podName:a6cc042e-9fd6-43f0-8655-72c53984b85e nodeName:}" failed. No retries permitted until 2026-01-27 19:10:24.738240828 +0000 UTC m=+27.243845975 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-gf4st" (UniqueName: "kubernetes.io/projected/a6cc042e-9fd6-43f0-8655-72c53984b85e-kube-api-access-gf4st") pod "node-resolver-xnmft" (UID: "a6cc042e-9fd6-43f0-8655-72c53984b85e") : failed to sync configmap cache: timed out waiting for the condition Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.240258 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 02:56:52.503212959 +0000 UTC Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.251359 4776 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.252784 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.252826 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.252839 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.252969 4776 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.271134 4776 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.271510 4776 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.274423 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.274449 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.274457 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.274470 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.274479 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:24Z","lastTransitionTime":"2026-01-27T19:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:24 crc kubenswrapper[4776]: E0127 19:10:24.296530 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.299340 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.299374 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.299386 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.299402 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.299414 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:24Z","lastTransitionTime":"2026-01-27T19:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.321920 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.321971 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:24 crc kubenswrapper[4776]: E0127 19:10:24.322076 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:24 crc kubenswrapper[4776]: E0127 19:10:24.322161 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:24 crc kubenswrapper[4776]: E0127 19:10:24.337643 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.340994 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.341024 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.341032 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.341046 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.341055 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:24Z","lastTransitionTime":"2026-01-27T19:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:24 crc kubenswrapper[4776]: E0127 19:10:24.365089 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.368629 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.368653 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.368662 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.368675 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.368683 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:24Z","lastTransitionTime":"2026-01-27T19:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.397491 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 27 19:10:24 crc kubenswrapper[4776]: E0127 19:10:24.397805 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.407943 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.407970 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.407978 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.407991 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.407999 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:24Z","lastTransitionTime":"2026-01-27T19:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:24 crc kubenswrapper[4776]: E0127 19:10:24.422455 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: E0127 19:10:24.422559 4776 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.424270 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.424316 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.424330 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.424350 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.424362 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:24Z","lastTransitionTime":"2026-01-27T19:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.438931 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.442329 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" event={"ID":"7ebf810d-762c-445f-9705-a74d274fcf1b","Type":"ContainerStarted","Data":"4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.442362 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" event={"ID":"7ebf810d-762c-445f-9705-a74d274fcf1b","Type":"ContainerStarted","Data":"d6ead0cd28283b7ed1910157b9fa49ee3ea4815b940eaa147b50f63241444ba2"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.443530 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-qz4kb" event={"ID":"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f","Type":"ContainerStarted","Data":"6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.445368 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" event={"ID":"6e047604-a155-4166-9ab0-0577cbe70bb6","Type":"ContainerStarted","Data":"345015a35a0dd8549e39bf349ac053c13346fe63647f6cfa054ebd7512dd9636"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.447496 4776 generic.go:334] "Generic (PLEG): container finished" podID="0acf82d2-cb56-42c0-9762-895319b4c26c" containerID="634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155" exitCode=0 Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.447537 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerDied","Data":"634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.447567 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerStarted","Data":"171fa5f81809762839d5a415c2a36c355fac9a5b618706a0b3d285b90f3a8060"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.449002 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-wmmtr" event={"ID":"255485b5-2151-4328-b925-c2e1a1f95e1f","Type":"ContainerStarted","Data":"da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.449034 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wmmtr" event={"ID":"255485b5-2151-4328-b925-c2e1a1f95e1f","Type":"ContainerStarted","Data":"f859598900127a0d2cb9e45ac5923fdf0c41bcb1de3f7343d0fa5d3728c8c2e8"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.461095 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.472715 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.494661 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":
\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.508938 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.522518 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.526083 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.526114 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.526126 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.526143 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.526155 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:24Z","lastTransitionTime":"2026-01-27T19:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.536539 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"p
odIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.554118 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-ku
be-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.569233 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.583935 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.594106 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.607560 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.620714 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.629799 4776 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.629846 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.629856 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.629872 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.629887 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:24Z","lastTransitionTime":"2026-01-27T19:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.632810 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.646595 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.665582 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.679711 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.691501 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.704512 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.717558 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.730115 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.731694 4776 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.731724 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.731734 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.731749 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.731759 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:24Z","lastTransitionTime":"2026-01-27T19:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.742106 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf4st\" (UniqueName: \"kubernetes.io/projected/a6cc042e-9fd6-43f0-8655-72c53984b85e-kube-api-access-gf4st\") pod \"node-resolver-xnmft\" (UID: \"a6cc042e-9fd6-43f0-8655-72c53984b85e\") " pod="openshift-dns/node-resolver-xnmft" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.743415 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.745349 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gf4st\" (UniqueName: \"kubernetes.io/projected/a6cc042e-9fd6-43f0-8655-72c53984b85e-kube-api-access-gf4st\") pod \"node-resolver-xnmft\" (UID: \"a6cc042e-9fd6-43f0-8655-72c53984b85e\") " pod="openshift-dns/node-resolver-xnmft" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.756069 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.769987 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.780900 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.798074 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z 
is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.811783 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.815673 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-xnmft" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.824196 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.834252 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.834310 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.834325 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.834365 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.834377 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:24Z","lastTransitionTime":"2026-01-27T19:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.836555 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:24Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:24 crc kubenswrapper[4776]: W0127 19:10:24.893343 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6cc042e_9fd6_43f0_8655_72c53984b85e.slice/crio-77d7723ffb8d15832a489c3df5191f07862df9234271f801618684c0df46f9d4 WatchSource:0}: Error finding container 77d7723ffb8d15832a489c3df5191f07862df9234271f801618684c0df46f9d4: Status 404 returned error can't find the container with id 77d7723ffb8d15832a489c3df5191f07862df9234271f801618684c0df46f9d4 Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.936883 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.936930 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.936940 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.936954 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:24 crc kubenswrapper[4776]: I0127 19:10:24.936964 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:24Z","lastTransitionTime":"2026-01-27T19:10:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.039359 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.039432 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.039456 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.039494 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.039517 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:25Z","lastTransitionTime":"2026-01-27T19:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.142244 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.142282 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.142293 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.142311 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.142333 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:25Z","lastTransitionTime":"2026-01-27T19:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.241447 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 23:11:34.450194046 +0000 UTC Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.244435 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.244486 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.244505 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.244524 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.244538 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:25Z","lastTransitionTime":"2026-01-27T19:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.319083 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.319225 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.347266 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.347305 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.347316 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.347333 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.347346 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:25Z","lastTransitionTime":"2026-01-27T19:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.449760 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.449819 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.449831 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.449846 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.449858 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:25Z","lastTransitionTime":"2026-01-27T19:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.456203 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-xnmft" event={"ID":"a6cc042e-9fd6-43f0-8655-72c53984b85e","Type":"ContainerStarted","Data":"f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.456254 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-xnmft" event={"ID":"a6cc042e-9fd6-43f0-8655-72c53984b85e","Type":"ContainerStarted","Data":"77d7723ffb8d15832a489c3df5191f07862df9234271f801618684c0df46f9d4"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.457799 4776 generic.go:334] "Generic (PLEG): container finished" podID="6e047604-a155-4166-9ab0-0577cbe70bb6" containerID="238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3" exitCode=0 Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.457845 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" event={"ID":"6e047604-a155-4166-9ab0-0577cbe70bb6","Type":"ContainerDied","Data":"238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.462407 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.464622 4776 generic.go:334] "Generic (PLEG): container finished" podID="0acf82d2-cb56-42c0-9762-895319b4c26c" containerID="3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834" exitCode=1 Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.464683 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerStarted","Data":"bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.464707 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerStarted","Data":"8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.464716 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerStarted","Data":"a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.464724 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerStarted","Data":"e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.464732 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerDied","Data":"3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.464742 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" 
event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerStarted","Data":"ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.470128 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" event={"ID":"7ebf810d-762c-445f-9705-a74d274fcf1b","Type":"ContainerStarted","Data":"b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.474886 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.488938 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.509694 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secret
s/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatu
ses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.524016 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.543172 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.560786 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.560832 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.560843 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.560859 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.560869 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:25Z","lastTransitionTime":"2026-01-27T19:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.571341 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.584685 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sh
a256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.597466 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.608668 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.620489 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.636441 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.650689 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.662928 4776 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.664150 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.664181 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.664190 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.664207 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.664218 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:25Z","lastTransitionTime":"2026-01-27T19:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.674820 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.686331 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resou
rce-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.697577 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.710579 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.722276 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.736331 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni 
whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sy
sctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.751645 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.761494 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.771811 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.771855 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.771868 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.771886 4776 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.771898 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:25Z","lastTransitionTime":"2026-01-27T19:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.787462 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.804513 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.815573 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.841747 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secret
s/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatu
ses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.855263 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.855427 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:10:33.855404881 +0000 UTC m=+36.361010038 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.861047 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.872568 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.874060 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.874084 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.874094 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.874107 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.874117 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:25Z","lastTransitionTime":"2026-01-27T19:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.886422 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:25Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.956805 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.956867 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.956903 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.956957 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.957116 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.957143 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.957158 4776 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.957220 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:33.957200969 +0000 UTC m=+36.462806126 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.957606 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.957628 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.957640 4776 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.957671 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:33.957661223 +0000 UTC m=+36.463266380 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.957706 4776 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.957732 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:33.957725275 +0000 UTC m=+36.463330432 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.957803 4776 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:25 crc kubenswrapper[4776]: E0127 19:10:25.957843 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
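The kube-api-access-* mounts failing above are projected volumes: they bundle a bound service-account token with the kube-root-ca.crt and openshift-service-ca.crt ConfigMaps, and SetUp cannot proceed until those objects are registered in the kubelet's cache. The sketch below shows the shape of such a volume using the Kubernetes core/v1 Go types; the paths, item keys, and token expiry are illustrative assumptions, while the ConfigMap names come from the errors above.

// Sketch of a kube-api-access-style projected volume definition.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	expiry := int64(3607) // assumed token lifetime for illustration
	vol := corev1.Volume{
		Name: "kube-api-access-cqllr",
		VolumeSource: corev1.VolumeSource{
			Projected: &corev1.ProjectedVolumeSource{
				Sources: []corev1.VolumeProjection{
					{ServiceAccountToken: &corev1.ServiceAccountTokenProjection{
						ExpirationSeconds: &expiry,
						Path:              "token",
					}},
					{ConfigMap: &corev1.ConfigMapProjection{
						LocalObjectReference: corev1.LocalObjectReference{Name: "kube-root-ca.crt"},
						Items:                []corev1.KeyToPath{{Key: "ca.crt", Path: "ca.crt"}},
					}},
					{ConfigMap: &corev1.ConfigMapProjection{
						LocalObjectReference: corev1.LocalObjectReference{Name: "openshift-service-ca.crt"},
						Items:                []corev1.KeyToPath{{Key: "service-ca.crt", Path: "service-ca.crt"}},
					}},
				},
			},
		},
	}
	fmt.Printf("%+v\n", vol)
}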
No retries permitted until 2026-01-27 19:10:33.957832218 +0000 UTC m=+36.463437375 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.976371 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.976726 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.976738 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.976756 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:25 crc kubenswrapper[4776]: I0127 19:10:25.976793 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:25Z","lastTransitionTime":"2026-01-27T19:10:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.081801 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.081843 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.081856 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.081872 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.081885 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:26Z","lastTransitionTime":"2026-01-27T19:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.184566 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.184602 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.184613 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.184640 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.184651 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:26Z","lastTransitionTime":"2026-01-27T19:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.242291 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 00:13:10.229669627 +0000 UTC Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.287663 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.287712 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.287723 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.287741 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.287753 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:26Z","lastTransitionTime":"2026-01-27T19:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.319147 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.319156 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:26 crc kubenswrapper[4776]: E0127 19:10:26.319264 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:26 crc kubenswrapper[4776]: E0127 19:10:26.319366 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.389603 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.389648 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.389659 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.389676 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.389689 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:26Z","lastTransitionTime":"2026-01-27T19:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.476799 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" event={"ID":"6e047604-a155-4166-9ab0-0577cbe70bb6","Type":"ContainerStarted","Data":"bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94"} Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.491526 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.491569 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.491584 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.491606 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.491621 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:26Z","lastTransitionTime":"2026-01-27T19:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.506394 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.527171 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.538083 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.550128 4776 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.560317 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.576112 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-li
b\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\
\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.587094 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.593369 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.593404 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.593416 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.593434 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.593447 4776 setters.go:603] "Node 
became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:26Z","lastTransitionTime":"2026-01-27T19:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.599095 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.610869 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.621693 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478
274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.632471 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.642102 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.651815 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.666405 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni 
whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/
os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/h
ost/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:26Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.695301 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.695338 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.695349 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.695372 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.695381 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:26Z","lastTransitionTime":"2026-01-27T19:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.798872 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.798948 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.798970 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.798999 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.799021 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:26Z","lastTransitionTime":"2026-01-27T19:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.901413 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.901448 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.901458 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.901474 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 27 19:10:26 crc kubenswrapper[4776]: I0127 19:10:26.901487 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:26Z","lastTransitionTime":"2026-01-27T19:10:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.004434 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.004898 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.005083 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.005224 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.005341 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:27Z","lastTransitionTime":"2026-01-27T19:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.107670 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.107709 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.107718 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.107734 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.107747 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:27Z","lastTransitionTime":"2026-01-27T19:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.210120 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.210449 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.210461 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.210477 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.210489 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:27Z","lastTransitionTime":"2026-01-27T19:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.243254 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 19:05:23.588121993 +0000 UTC
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.313649 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.313690 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.313702 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.313728 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.313741 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:27Z","lastTransitionTime":"2026-01-27T19:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.319070 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 27 19:10:27 crc kubenswrapper[4776]: E0127 19:10:27.319171 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.416854 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.416894 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.416905 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.416920 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.416930 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:27Z","lastTransitionTime":"2026-01-27T19:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.489696 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.490810 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerStarted","Data":"32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25"} Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.492938 4776 generic.go:334] "Generic (PLEG): container finished" podID="6e047604-a155-4166-9ab0-0577cbe70bb6" containerID="bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94" exitCode=0 Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.493004 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" event={"ID":"6e047604-a155-4166-9ab0-0577cbe70bb6","Type":"ContainerDied","Data":"bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94"} Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.507070 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:27Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.519785 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:27Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.522345 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.522383 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.522398 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.522417 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.522429 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:27Z","lastTransitionTime":"2026-01-27T19:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.541473 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:27Z 
is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.558614 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:27Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.572028 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:27Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.585648 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:27Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.598122 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:27Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.609540 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:27Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.620639 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:27Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.625047 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.625085 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.625096 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.625112 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.625123 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:27Z","lastTransitionTime":"2026-01-27T19:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.633758 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:27Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.649011 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:27Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.662556 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:27Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.672239 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:27Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.680835 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:27Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.727028 4776 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.727061 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.727071 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.727086 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.727097 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:27Z","lastTransitionTime":"2026-01-27T19:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.829507 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.829541 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.829551 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.829565 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.829575 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:27Z","lastTransitionTime":"2026-01-27T19:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.933214 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.933258 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.933274 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.933290 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:27 crc kubenswrapper[4776]: I0127 19:10:27.933301 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:27Z","lastTransitionTime":"2026-01-27T19:10:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.035225 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.035274 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.035283 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.035296 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.035307 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:28Z","lastTransitionTime":"2026-01-27T19:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.057458 4776 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.137944 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.137974 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.137983 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.137996 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.138006 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:28Z","lastTransitionTime":"2026-01-27T19:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.240484 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.240509 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.240517 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.240530 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.240538 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:28Z","lastTransitionTime":"2026-01-27T19:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.243899 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 07:11:31.993258432 +0000 UTC Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.319685 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:28 crc kubenswrapper[4776]: E0127 19:10:28.319941 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.327261 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:28 crc kubenswrapper[4776]: E0127 19:10:28.330412 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.342926 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.344326 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.344379 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.344404 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.344432 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.344454 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:28Z","lastTransitionTime":"2026-01-27T19:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.359721 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.376007 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.391661 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.400921 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.418910 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-li
b\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\
\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.433681 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.445393 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.447201 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.447241 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.447253 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.447269 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.447280 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:28Z","lastTransitionTime":"2026-01-27T19:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.458741 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.471200 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.482535 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d746
2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.493617 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.499002 4776 generic.go:334] "Generic (PLEG): container finished" podID="6e047604-a155-4166-9ab0-0577cbe70bb6" containerID="956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e" exitCode=0 Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.499053 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" event={"ID":"6e047604-a155-4166-9ab0-0577cbe70bb6","Type":"ContainerDied","Data":"956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e"} Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.507087 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.521482 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.534791 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.546797 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.549543 4776 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.549575 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.549585 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.549601 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.549614 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:28Z","lastTransitionTime":"2026-01-27T19:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.560113 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.570920 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime
\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.592500 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z 
is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.606945 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.620729 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.631350 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.642168 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.651996 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.652031 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.652044 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.652066 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.652084 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:28Z","lastTransitionTime":"2026-01-27T19:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.654010 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.665640 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.676299 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.690362 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.701948 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\
\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:28Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.753906 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.754099 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.754424 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.754520 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.754618 4776 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:28Z","lastTransitionTime":"2026-01-27T19:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.857894 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.857961 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.857985 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.858019 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.858043 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:28Z","lastTransitionTime":"2026-01-27T19:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.961208 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.961250 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.961262 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.961279 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:28 crc kubenswrapper[4776]: I0127 19:10:28.961289 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:28Z","lastTransitionTime":"2026-01-27T19:10:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.063289 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.063554 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.063624 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.063692 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.063747 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:29Z","lastTransitionTime":"2026-01-27T19:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.166914 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.167265 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.167353 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.167439 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.167498 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:29Z","lastTransitionTime":"2026-01-27T19:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.244505 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 00:40:51.456298088 +0000 UTC Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.270582 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.270654 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.270672 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.270694 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.270712 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:29Z","lastTransitionTime":"2026-01-27T19:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.319015 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:29 crc kubenswrapper[4776]: E0127 19:10:29.319497 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.373949 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.374018 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.374041 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.374069 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.374089 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:29Z","lastTransitionTime":"2026-01-27T19:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.477481 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.477543 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.477562 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.477586 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.477604 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:29Z","lastTransitionTime":"2026-01-27T19:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.510206 4776 generic.go:334] "Generic (PLEG): container finished" podID="6e047604-a155-4166-9ab0-0577cbe70bb6" containerID="ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881" exitCode=0 Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.510237 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" event={"ID":"6e047604-a155-4166-9ab0-0577cbe70bb6","Type":"ContainerDied","Data":"ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881"} Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.532125 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.551455 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.578853 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secret
s/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatu
ses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.580384 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.580510 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.580526 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.580546 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.580559 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:29Z","lastTransitionTime":"2026-01-27T19:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.593201 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.605399 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.622142 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.637003 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.653131 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.666395 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-k
ube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.678170 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.683452 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.683483 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.683494 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.683510 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.683521 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:29Z","lastTransitionTime":"2026-01-27T19:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.689257 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.698156 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.707148 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.721860 4776 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:29Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.786385 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.786425 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:29 
crc kubenswrapper[4776]: I0127 19:10:29.786433 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.786446 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.786454 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:29Z","lastTransitionTime":"2026-01-27T19:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.888238 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.888274 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.888283 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.888296 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.888306 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:29Z","lastTransitionTime":"2026-01-27T19:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.990352 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.990381 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.990389 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.990404 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:29 crc kubenswrapper[4776]: I0127 19:10:29.990414 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:29Z","lastTransitionTime":"2026-01-27T19:10:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.093000 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.093035 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.093046 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.093062 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.093073 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:30Z","lastTransitionTime":"2026-01-27T19:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.196079 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.196128 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.196144 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.196168 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.196187 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:30Z","lastTransitionTime":"2026-01-27T19:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.244979 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 16:48:33.075298476 +0000 UTC Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.298512 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.298550 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.298565 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.298581 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.298592 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:30Z","lastTransitionTime":"2026-01-27T19:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.318760 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.318823 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:30 crc kubenswrapper[4776]: E0127 19:10:30.318886 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:30 crc kubenswrapper[4776]: E0127 19:10:30.319187 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.401280 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.401309 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.401320 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.401336 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.401347 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:30Z","lastTransitionTime":"2026-01-27T19:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.504144 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.504437 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.504574 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.504707 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.504881 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:30Z","lastTransitionTime":"2026-01-27T19:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.518139 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" event={"ID":"6e047604-a155-4166-9ab0-0577cbe70bb6","Type":"ContainerStarted","Data":"70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad"} Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.525028 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.526138 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerStarted","Data":"f9325416a9fd22dfe571db44d18d012de1ca1b03d95538510ac4c401312bc56a"} Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.526545 4776 scope.go:117] "RemoveContainer" containerID="3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.526945 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.527050 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.535151 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.546886 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\
\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.559898 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.562927 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.564575 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.578974 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.590544 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.607419 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.607470 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.607488 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.607513 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.607530 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:30Z","lastTransitionTime":"2026-01-27T19:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.616248 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z 
is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.632090 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.645291 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.660641 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.674861 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.688532 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.703224 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1
477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.710477 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.710520 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.710532 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.710548 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.710563 4776 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:30Z","lastTransitionTime":"2026-01-27T19:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.719126 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.733064 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.748005 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.758580 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.775426 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ 
sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9325416a9fd22dfe571db44d18d012de1ca1b03d95538510ac4c401312bc56a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27
T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.789540 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.802673 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.812208 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.812231 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.812238 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.812250 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.812259 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:30Z","lastTransitionTime":"2026-01-27T19:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.818278 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.831636 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sh
a256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.851863 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.864206 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.876092 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.890814 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9
8100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.902661 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.910888 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.914124 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.914259 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.914512 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.914689 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.914789 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:30Z","lastTransitionTime":"2026-01-27T19:10:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:30 crc kubenswrapper[4776]: I0127 19:10:30.921070 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:30Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.016832 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.016876 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.016886 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.016898 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.016906 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:31Z","lastTransitionTime":"2026-01-27T19:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.119604 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.119671 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.119698 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.119727 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.119750 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:31Z","lastTransitionTime":"2026-01-27T19:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.222617 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.222678 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.222706 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.222735 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.222815 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:31Z","lastTransitionTime":"2026-01-27T19:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.245143 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 22:16:58.459832974 +0000 UTC Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.319488 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:31 crc kubenswrapper[4776]: E0127 19:10:31.319609 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.325738 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.325853 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.325881 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.325913 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.325937 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:31Z","lastTransitionTime":"2026-01-27T19:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.428259 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.428595 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.428615 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.428644 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.428662 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:31Z","lastTransitionTime":"2026-01-27T19:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.531224 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.531279 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.531301 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.531323 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.531340 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:31Z","lastTransitionTime":"2026-01-27T19:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.535153 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.536017 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerStarted","Data":"a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58"} Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.536237 4776 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.542171 4776 generic.go:334] "Generic (PLEG): container finished" podID="6e047604-a155-4166-9ab0-0577cbe70bb6" containerID="70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad" exitCode=0 Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.542391 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" event={"ID":"6e047604-a155-4166-9ab0-0577cbe70bb6","Type":"ContainerDied","Data":"70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad"} Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.561459 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.582470 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.600155 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.617086 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.632266 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.633584 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.633619 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.633632 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.633649 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.633660 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:31Z","lastTransitionTime":"2026-01-27T19:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.646851 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.660883 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.677762 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.697921 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77
3257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.709232 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.720881 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.733653 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.737074 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.737109 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.737120 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.737140 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.737156 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:31Z","lastTransitionTime":"2026-01-27T19:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.743025 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.766634 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9325416a9fd22dfe571db44d18d012de1ca1b03d95538510ac4c401312bc56a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\
\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\
\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.802216 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.832531 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.838929 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.838967 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.838979 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.838996 4776 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.839007 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:31Z","lastTransitionTime":"2026-01-27T19:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.845292 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.857134 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.866791 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.882169 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ 
MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9325416a9fd22dfe571db44d18d012de1ca1b03d95538510ac4c401312bc56a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:2
4Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.895630 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.908928 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.924676 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.941705 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.941745 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.941755 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.941786 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.941798 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:31Z","lastTransitionTime":"2026-01-27T19:10:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.941920 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.955738 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.966719 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:31 crc kubenswrapper[4776]: I0127 19:10:31.986795 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:31Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.005425 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.044024 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.044138 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.044158 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.044182 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.044199 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:32Z","lastTransitionTime":"2026-01-27T19:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.147692 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.147759 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.147812 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.147836 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.147853 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:32Z","lastTransitionTime":"2026-01-27T19:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.246156 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 07:13:32.716814947 +0000 UTC Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.251450 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.251514 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.251533 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.251560 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.251578 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:32Z","lastTransitionTime":"2026-01-27T19:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.318753 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.318882 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:32 crc kubenswrapper[4776]: E0127 19:10:32.319003 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:32 crc kubenswrapper[4776]: E0127 19:10:32.319165 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.354532 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.354568 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.354577 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.354592 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.354602 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:32Z","lastTransitionTime":"2026-01-27T19:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.457981 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.458048 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.458074 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.458107 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.458132 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:32Z","lastTransitionTime":"2026-01-27T19:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.549370 4776 generic.go:334] "Generic (PLEG): container finished" podID="6e047604-a155-4166-9ab0-0577cbe70bb6" containerID="ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0" exitCode=0 Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.549433 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" event={"ID":"6e047604-a155-4166-9ab0-0577cbe70bb6","Type":"ContainerDied","Data":"ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0"} Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.549560 4776 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.560505 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.560577 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.560605 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.560638 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.560659 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:32Z","lastTransitionTime":"2026-01-27T19:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.563357 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.576179 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.586113 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.597617 4776 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.609083 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.625598 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9325416a9fd22dfe571db44d18d012de1ca1b03d95538510ac4c401312bc56a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\
\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\
\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.640728 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.655720 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.664136 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.664182 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.664231 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.664250 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.664264 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:32Z","lastTransitionTime":"2026-01-27T19:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.671939 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.688966 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sh
a256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.701823 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.715950 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.734914 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.748654 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:32Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.765982 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.766021 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:32 crc 
kubenswrapper[4776]: I0127 19:10:32.766033 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.766048 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.766059 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:32Z","lastTransitionTime":"2026-01-27T19:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.869005 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.869055 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.869069 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.869094 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.869110 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:32Z","lastTransitionTime":"2026-01-27T19:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.971382 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.971411 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.971419 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.971433 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:32 crc kubenswrapper[4776]: I0127 19:10:32.971442 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:32Z","lastTransitionTime":"2026-01-27T19:10:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.074377 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.074432 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.074448 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.074475 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.074494 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:33Z","lastTransitionTime":"2026-01-27T19:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.182282 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.182359 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.182563 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.182657 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.182686 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:33Z","lastTransitionTime":"2026-01-27T19:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.246536 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 17:45:54.88036075 +0000 UTC Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.286636 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.286665 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.286676 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.286693 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.286702 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:33Z","lastTransitionTime":"2026-01-27T19:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.319669 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.319862 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.389326 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.389381 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.389395 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.389413 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.389425 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:33Z","lastTransitionTime":"2026-01-27T19:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.491667 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.491704 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.491715 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.491744 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.491755 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:33Z","lastTransitionTime":"2026-01-27T19:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.532907 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.558727 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" event={"ID":"6e047604-a155-4166-9ab0-0577cbe70bb6","Type":"ContainerStarted","Data":"81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb"} Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.561305 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovnkube-controller/0.log" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.565186 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.566740 4776 generic.go:334] "Generic (PLEG): container finished" podID="0acf82d2-cb56-42c0-9762-895319b4c26c" containerID="f9325416a9fd22dfe571db44d18d012de1ca1b03d95538510ac4c401312bc56a" exitCode=1 Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.566843 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerDied","Data":"f9325416a9fd22dfe571db44d18d012de1ca1b03d95538510ac4c401312bc56a"} Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.567594 4776 scope.go:117] "RemoveContainer" containerID="f9325416a9fd22dfe571db44d18d012de1ca1b03d95538510ac4c401312bc56a" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.583743 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.594616 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.594682 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.594702 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.594727 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.594745 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:33Z","lastTransitionTime":"2026-01-27T19:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.599296 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.613631 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.630607 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.647254 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.661252 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.673966 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.690348 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.697879 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.697918 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:33 crc 
kubenswrapper[4776]: I0127 19:10:33.697928 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.697943 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.697955 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:33Z","lastTransitionTime":"2026-01-27T19:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.703041 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.715254 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.730210 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.747501 4776 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.758784 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.784101 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9325416a9fd22dfe571db44d18d012de1ca1b03d95538510ac4c401312bc56a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\
\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\
\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.801045 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.801580 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.801615 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.801625 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.801641 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.801652 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:33Z","lastTransitionTime":"2026-01-27T19:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.812356 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.830421 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9325416a9fd22dfe571db44d18d012de1ca1b03d95538510ac4c401312bc56a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9325416a9fd22dfe571db44d18d012de1ca1b03d95538510ac4c401312bc56a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"message\\\":\\\"amespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:33.411188 6031 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0127 19:10:33.411695 6031 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:33.411815 6031 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:33.412026 6031 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from 
k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:33.412388 6031 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:33.412662 6031 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0127 19:10:33.412686 6031 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0127 19:10:33.412745 6031 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:10:33.412783 6031 handler.go:208] Removed *v1.Node event handler 2\\\\nI0127 19:10:33.412799 6031 factory.go:656] Stopping watch factory\\\\nI0127 19:10:33.412813 6031 handler.go:208] Removed *v1.Node event handler 7\\\\nI0127 19:10:33.412821 6031 ovnkube.go:599] Stopped ovnkube\\\\nI0127 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\
\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.842253 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.853323 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.862452 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.862636 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:10:49.862613904 +0000 UTC m=+52.368219081 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.867865 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.883850 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.897583 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.903350 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.903382 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.903390 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.903403 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.903413 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:33Z","lastTransitionTime":"2026-01-27T19:10:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.909603 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.923392 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.945295 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.959889 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.963152 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 
19:10:33.963203 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.963224 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.963268 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.963331 4776 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.963399 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:49.963387959 +0000 UTC m=+52.468993096 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.963406 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.963426 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.963438 4776 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.963471 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:49.963459522 +0000 UTC m=+52.469064669 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.963331 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.963491 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.963500 4776 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.963526 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:49.963518174 +0000 UTC m=+52.469123321 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.963613 4776 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:33 crc kubenswrapper[4776]: E0127 19:10:33.963701 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:49.963675119 +0000 UTC m=+52.469280326 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.976069 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:33 crc kubenswrapper[4776]: I0127 19:10:33.994662 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:33Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.008049 4776 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.008088 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.008104 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.008120 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.008130 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.110338 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.110394 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.110407 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.110426 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.110438 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.214142 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.214179 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.214189 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.214206 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.214216 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.247370 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 10:44:51.635418485 +0000 UTC Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.316411 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.316450 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.316461 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.316480 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.316493 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.319266 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:34 crc kubenswrapper[4776]: E0127 19:10:34.319502 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.319326 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:34 crc kubenswrapper[4776]: E0127 19:10:34.319861 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.419146 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.419202 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.419217 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.419237 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.419255 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.531753 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.531890 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.531946 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.532018 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.532072 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.572431 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovnkube-controller/1.log" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.573468 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovnkube-controller/0.log" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.575388 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.576140 4776 generic.go:334] "Generic (PLEG): container finished" podID="0acf82d2-cb56-42c0-9762-895319b4c26c" containerID="4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5" exitCode=1 Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.576181 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerDied","Data":"4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5"} Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.576245 4776 scope.go:117] "RemoveContainer" containerID="f9325416a9fd22dfe571db44d18d012de1ca1b03d95538510ac4c401312bc56a" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.577466 4776 scope.go:117] "RemoveContainer" containerID="4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5" Jan 27 19:10:34 crc kubenswrapper[4776]: E0127 19:10:34.579118 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" podUID="0acf82d2-cb56-42c0-9762-895319b4c26c" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.590462 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.620121 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f9325416a9fd22dfe571db44d18d012de1ca1b03d95538510ac4c401312bc56a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"message\\\":\\\"amespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:33.411188 6031 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0127 19:10:33.411695 6031 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:33.411815 6031 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:33.412026 6031 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:33.412388 6031 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) 
from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:33.412662 6031 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0127 19:10:33.412686 6031 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0127 19:10:33.412745 6031 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:10:33.412783 6031 handler.go:208] Removed *v1.Node event handler 2\\\\nI0127 19:10:33.412799 6031 factory.go:656] Stopping watch factory\\\\nI0127 19:10:33.412813 6031 handler.go:208] Removed *v1.Node event handler 7\\\\nI0127 19:10:33.412821 6031 ovnkube.go:599] Stopped ovnkube\\\\nI0127 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\" Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.473823 6239 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.474127 6239 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 19:10:34.474414 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:10:34.474452 6239 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0127 19:10:34.474459 6239 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0127 19:10:34.474475 6239 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0127 19:10:34.474499 6239 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0127 19:10:34.474530 6239 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0127 19:10:34.474547 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0127 19:10:34.474554 6239 factory.go:656] Stopping watch factory\\\\nI0127 19:10:34.474556 6239 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0127 19:10:34.474568 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0127 19:10:34.474570 6239 ovnkube.go:599] Stopped ovnkube\\\\nI0127 
19:10:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.635440 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.635480 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.635492 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.635693 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.635705 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.637961 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.652668 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.673886 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.691388 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\
\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.709815 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageI
D\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.723031 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.737948 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.738173 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.738200 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.738212 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.738230 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.738242 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.756156 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.773422 4776 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.780215 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.780252 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.780263 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.780279 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.780289 4776 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.789398 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: E0127 19:10:34.797135 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.799060 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.801890 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.801934 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.801951 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.801973 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.801989 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.815397 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: E0127 19:10:34.820233 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.823539 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.823576 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.823585 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.823599 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.823609 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: E0127 19:10:34.839538 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.843747 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.843819 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.843834 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.843852 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.843865 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: E0127 19:10:34.858687 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.862241 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.862281 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.862291 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.862305 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.862315 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: E0127 19:10:34.874897 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:34Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:34 crc kubenswrapper[4776]: E0127 19:10:34.875015 4776 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.876674 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.876714 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.876723 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.876736 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.876747 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.978693 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.978732 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.978742 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.978756 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:34 crc kubenswrapper[4776]: I0127 19:10:34.978779 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:34Z","lastTransitionTime":"2026-01-27T19:10:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.081131 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.081171 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.081181 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.081194 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.081204 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:35Z","lastTransitionTime":"2026-01-27T19:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.184277 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.184315 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.184325 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.184351 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.184361 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:35Z","lastTransitionTime":"2026-01-27T19:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.248437 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 06:30:39.185704296 +0000 UTC Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.286667 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.286728 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.286737 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.286750 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.286760 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:35Z","lastTransitionTime":"2026-01-27T19:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.319133 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:35 crc kubenswrapper[4776]: E0127 19:10:35.319332 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.388746 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.388862 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.388881 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.388905 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.388924 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:35Z","lastTransitionTime":"2026-01-27T19:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.492029 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.492110 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.492132 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.492157 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.492174 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:35Z","lastTransitionTime":"2026-01-27T19:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.585574 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovnkube-controller/1.log" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.593095 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.594414 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.594457 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.594474 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.594491 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.594505 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:35Z","lastTransitionTime":"2026-01-27T19:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.595113 4776 scope.go:117] "RemoveContainer" containerID="4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5" Jan 27 19:10:35 crc kubenswrapper[4776]: E0127 19:10:35.595277 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" podUID="0acf82d2-cb56-42c0-9762-895319b4c26c" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.610061 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.624638 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.636654 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.651967 4776 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.665968 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.682717 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\" Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.473823 6239 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.474127 6239 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 19:10:34.474414 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:10:34.474452 6239 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0127 19:10:34.474459 6239 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0127 19:10:34.474475 6239 handler.go:190] Sending 
*v1.NetworkPolicy event handler 4 for removal\\\\nI0127 19:10:34.474499 6239 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0127 19:10:34.474530 6239 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0127 19:10:34.474547 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0127 19:10:34.474554 6239 factory.go:656] Stopping watch factory\\\\nI0127 19:10:34.474556 6239 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0127 19:10:34.474568 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0127 19:10:34.474570 6239 ovnkube.go:599] Stopped ovnkube\\\\nI0127 19:10:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.696602 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.696641 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.696651 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.696665 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.696704 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:35Z","lastTransitionTime":"2026-01-27T19:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.700948 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.716103 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.728854 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.753659 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.769361 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.785737 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.800250 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.800303 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.800318 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.800338 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.800352 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:35Z","lastTransitionTime":"2026-01-27T19:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.807288 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.829905 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:35Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.904046 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.904137 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.904162 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.904197 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:35 crc kubenswrapper[4776]: I0127 19:10:35.904220 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:35Z","lastTransitionTime":"2026-01-27T19:10:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.006424 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.006673 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.006783 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.006875 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.006963 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:36Z","lastTransitionTime":"2026-01-27T19:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.109638 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.109685 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.109692 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.109706 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.109715 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:36Z","lastTransitionTime":"2026-01-27T19:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.174611 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg"] Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.175221 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.177032 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.177152 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.194081 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.207917 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.212139 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.212173 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.212186 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.212214 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.212230 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:36Z","lastTransitionTime":"2026-01-27T19:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.222807 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.237209 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.248720 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 10:10:26.843201242 +0000 UTC Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.249006 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.265075 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.284646 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/38523e9f-4493-4055-83b7-9e70ce801c8c-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wqrsg\" (UID: \"38523e9f-4493-4055-83b7-9e70ce801c8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.284720 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/38523e9f-4493-4055-83b7-9e70ce801c8c-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wqrsg\" (UID: \"38523e9f-4493-4055-83b7-9e70ce801c8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.284843 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/38523e9f-4493-4055-83b7-9e70ce801c8c-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wqrsg\" (UID: \"38523e9f-4493-4055-83b7-9e70ce801c8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.284930 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwn9r\" (UniqueName: \"kubernetes.io/projected/38523e9f-4493-4055-83b7-9e70ce801c8c-kube-api-access-wwn9r\") pod \"ovnkube-control-plane-749d76644c-wqrsg\" (UID: \"38523e9f-4493-4055-83b7-9e70ce801c8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.286443 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.300325 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.314802 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:36 crc 
kubenswrapper[4776]: I0127 19:10:36.314849 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.314862 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.314884 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.314898 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:36Z","lastTransitionTime":"2026-01-27T19:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.317117 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.319313 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.319313 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:36 crc kubenswrapper[4776]: E0127 19:10:36.319456 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:36 crc kubenswrapper[4776]: E0127 19:10:36.319648 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.333596 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.347382 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.364439 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.379619 4776 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.385599 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/38523e9f-4493-4055-83b7-9e70ce801c8c-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wqrsg\" (UID: \"38523e9f-4493-4055-83b7-9e70ce801c8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.385706 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/38523e9f-4493-4055-83b7-9e70ce801c8c-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wqrsg\" (UID: \"38523e9f-4493-4055-83b7-9e70ce801c8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.385806 4776 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/38523e9f-4493-4055-83b7-9e70ce801c8c-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wqrsg\" (UID: \"38523e9f-4493-4055-83b7-9e70ce801c8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.385856 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwn9r\" (UniqueName: \"kubernetes.io/projected/38523e9f-4493-4055-83b7-9e70ce801c8c-kube-api-access-wwn9r\") pod \"ovnkube-control-plane-749d76644c-wqrsg\" (UID: \"38523e9f-4493-4055-83b7-9e70ce801c8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.386428 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/38523e9f-4493-4055-83b7-9e70ce801c8c-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wqrsg\" (UID: \"38523e9f-4493-4055-83b7-9e70ce801c8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.386658 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/38523e9f-4493-4055-83b7-9e70ce801c8c-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wqrsg\" (UID: \"38523e9f-4493-4055-83b7-9e70ce801c8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.398499 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/38523e9f-4493-4055-83b7-9e70ce801c8c-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wqrsg\" (UID: \"38523e9f-4493-4055-83b7-9e70ce801c8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.406392 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwn9r\" (UniqueName: \"kubernetes.io/projected/38523e9f-4493-4055-83b7-9e70ce801c8c-kube-api-access-wwn9r\") pod \"ovnkube-control-plane-749d76644c-wqrsg\" (UID: \"38523e9f-4493-4055-83b7-9e70ce801c8c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.410758 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\" Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.473823 6239 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.474127 6239 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 19:10:34.474414 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:10:34.474452 6239 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0127 19:10:34.474459 6239 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0127 19:10:34.474475 6239 handler.go:190] Sending 
*v1.NetworkPolicy event handler 4 for removal\\\\nI0127 19:10:34.474499 6239 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0127 19:10:34.474530 6239 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0127 19:10:34.474547 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0127 19:10:34.474554 6239 factory.go:656] Stopping watch factory\\\\nI0127 19:10:34.474556 6239 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0127 19:10:34.474568 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0127 19:10:34.474570 6239 ovnkube.go:599] Stopped ovnkube\\\\nI0127 19:10:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.416993 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.417044 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.417057 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.417074 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.417086 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:36Z","lastTransitionTime":"2026-01-27T19:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.425678 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:36Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.496591 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" Jan 27 19:10:36 crc kubenswrapper[4776]: W0127 19:10:36.514860 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38523e9f_4493_4055_83b7_9e70ce801c8c.slice/crio-55253f53e06c7e1c4b69726bde49a65ea33f7bfc6e2f2c266dccbf0b66ff5a13 WatchSource:0}: Error finding container 55253f53e06c7e1c4b69726bde49a65ea33f7bfc6e2f2c266dccbf0b66ff5a13: Status 404 returned error can't find the container with id 55253f53e06c7e1c4b69726bde49a65ea33f7bfc6e2f2c266dccbf0b66ff5a13 Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.519367 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.519405 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.519417 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.519438 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.519450 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:36Z","lastTransitionTime":"2026-01-27T19:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.599878 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" event={"ID":"38523e9f-4493-4055-83b7-9e70ce801c8c","Type":"ContainerStarted","Data":"55253f53e06c7e1c4b69726bde49a65ea33f7bfc6e2f2c266dccbf0b66ff5a13"} Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.621933 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.621994 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.622016 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.622045 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.622069 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:36Z","lastTransitionTime":"2026-01-27T19:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.723991 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.724046 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.724064 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.724086 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.724103 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:36Z","lastTransitionTime":"2026-01-27T19:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.827406 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.827461 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.827477 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.827506 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.827522 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:36Z","lastTransitionTime":"2026-01-27T19:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.931520 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.931562 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.931577 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.931595 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:36 crc kubenswrapper[4776]: I0127 19:10:36.931608 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:36Z","lastTransitionTime":"2026-01-27T19:10:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.035412 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.035481 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.035507 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.035532 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.035549 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:37Z","lastTransitionTime":"2026-01-27T19:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.139353 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.139397 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.139408 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.139427 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.139440 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:37Z","lastTransitionTime":"2026-01-27T19:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.242665 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.242732 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.242757 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.242823 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.242846 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:37Z","lastTransitionTime":"2026-01-27T19:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.249999 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 19:08:38.381037535 +0000 UTC Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.319520 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:37 crc kubenswrapper[4776]: E0127 19:10:37.319665 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.345097 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.345142 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.345154 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.345171 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.345183 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:37Z","lastTransitionTime":"2026-01-27T19:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.447604 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.447651 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.447665 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.447683 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.447699 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:37Z","lastTransitionTime":"2026-01-27T19:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.549941 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.550001 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.550016 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.550033 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.550044 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:37Z","lastTransitionTime":"2026-01-27T19:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.609994 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" event={"ID":"38523e9f-4493-4055-83b7-9e70ce801c8c","Type":"ContainerStarted","Data":"f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f"} Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.610060 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" event={"ID":"38523e9f-4493-4055-83b7-9e70ce801c8c","Type":"ContainerStarted","Data":"bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12"} Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.626530 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.644887 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.652554 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.652601 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.652613 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.652631 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.652643 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:37Z","lastTransitionTime":"2026-01-27T19:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.662357 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.680409 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.697687 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.709464 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.725692 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.734368 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.746062 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-reg
eneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.755070 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.755115 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.755127 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.755144 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.755156 4776 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:37Z","lastTransitionTime":"2026-01-27T19:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.759661 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.769155 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.778194 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.787414 4776 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.805055 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\" Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.473823 6239 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.474127 6239 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 19:10:34.474414 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:10:34.474452 6239 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0127 19:10:34.474459 6239 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0127 19:10:34.474475 6239 handler.go:190] Sending 
*v1.NetworkPolicy event handler 4 for removal\\\\nI0127 19:10:34.474499 6239 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0127 19:10:34.474530 6239 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0127 19:10:34.474547 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0127 19:10:34.474554 6239 factory.go:656] Stopping watch factory\\\\nI0127 19:10:34.474556 6239 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0127 19:10:34.474568 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0127 19:10:34.474570 6239 ovnkube.go:599] Stopped ovnkube\\\\nI0127 19:10:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.819021 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:37Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.857710 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.857750 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.857787 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.857832 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.857844 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:37Z","lastTransitionTime":"2026-01-27T19:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.960802 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.960855 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.960873 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.960897 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:37 crc kubenswrapper[4776]: I0127 19:10:37.960916 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:37Z","lastTransitionTime":"2026-01-27T19:10:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.063254 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.063327 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.063372 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.063398 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.063417 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:38Z","lastTransitionTime":"2026-01-27T19:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.078985 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-t7tbg"] Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.079399 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:38 crc kubenswrapper[4776]: E0127 19:10:38.079457 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.090649 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.098430 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs\") pod \"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.098460 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gszlp\" (UniqueName: \"kubernetes.io/projected/28a63e45-994f-41fe-82fb-605486d13b98-kube-api-access-gszlp\") pod 
\"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.116486 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ 
MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\" Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.473823 6239 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.474127 6239 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 19:10:34.474414 6239 
handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:10:34.474452 6239 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0127 19:10:34.474459 6239 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0127 19:10:34.474475 6239 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0127 19:10:34.474499 6239 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0127 19:10:34.474530 6239 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0127 19:10:34.474547 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0127 19:10:34.474554 6239 factory.go:656] Stopping watch factory\\\\nI0127 19:10:34.474556 6239 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0127 19:10:34.474568 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0127 19:10:34.474570 6239 ovnkube.go:599] Stopped ovnkube\\\\nI0127 19:10:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd4
7ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.128219 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.146252 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.165609 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.165675 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.165696 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.165728 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.165750 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:38Z","lastTransitionTime":"2026-01-27T19:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.170786 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.193907 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.198984 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs\") pod \"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.199050 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gszlp\" (UniqueName: \"kubernetes.io/projected/28a63e45-994f-41fe-82fb-605486d13b98-kube-api-access-gszlp\") pod \"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:38 crc kubenswrapper[4776]: E0127 19:10:38.199176 4776 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:10:38 crc kubenswrapper[4776]: E0127 19:10:38.199273 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs podName:28a63e45-994f-41fe-82fb-605486d13b98 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:38.699248228 +0000 UTC m=+41.204853395 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs") pod "network-metrics-daemon-t7tbg" (UID: "28a63e45-994f-41fe-82fb-605486d13b98") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.220356 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gszlp\" (UniqueName: \"kubernetes.io/projected/28a63e45-994f-41fe-82fb-605486d13b98-kube-api-access-gszlp\") pod \"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.225651 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.237467 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.247810 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.250419 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 23:12:24.524150008 +0000 UTC Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.259687 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.268276 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.268316 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:38 crc 
kubenswrapper[4776]: I0127 19:10:38.268328 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.268344 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.268355 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:38Z","lastTransitionTime":"2026-01-27T19:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.270221 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:1
0:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.280880 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"star
ted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.296079 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.306036 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.317644 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.318798 4776 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.318927 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:38 crc kubenswrapper[4776]: E0127 19:10:38.319054 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:38 crc kubenswrapper[4776]: E0127 19:10:38.319160 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.332894 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.344729 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.354135 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.365227 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.369806 4776 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.369862 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.369876 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.369892 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.369905 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:38Z","lastTransitionTime":"2026-01-27T19:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.376782 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.398055 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\" Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.473823 6239 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.474127 6239 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 19:10:34.474414 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:10:34.474452 6239 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0127 19:10:34.474459 6239 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0127 19:10:34.474475 6239 handler.go:190] Sending 
*v1.NetworkPolicy event handler 4 for removal\\\\nI0127 19:10:34.474499 6239 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0127 19:10:34.474530 6239 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0127 19:10:34.474547 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0127 19:10:34.474554 6239 factory.go:656] Stopping watch factory\\\\nI0127 19:10:34.474556 6239 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0127 19:10:34.474568 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0127 19:10:34.474570 6239 ovnkube.go:599] Stopped ovnkube\\\\nI0127 19:10:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.410747 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.423208 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.438856 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.451432 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.471073 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.472078 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.472109 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.472120 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.472138 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.472150 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:38Z","lastTransitionTime":"2026-01-27T19:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.482420 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.493526 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.509366 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.521748 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 
19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.536563 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.549793 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:38Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.575057 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.575111 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.575128 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.575152 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.575167 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:38Z","lastTransitionTime":"2026-01-27T19:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.677830 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.677879 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.677888 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.677904 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.677915 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:38Z","lastTransitionTime":"2026-01-27T19:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.704217 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs\") pod \"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:38 crc kubenswrapper[4776]: E0127 19:10:38.704406 4776 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:10:38 crc kubenswrapper[4776]: E0127 19:10:38.704476 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs podName:28a63e45-994f-41fe-82fb-605486d13b98 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:39.704458788 +0000 UTC m=+42.210063935 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs") pod "network-metrics-daemon-t7tbg" (UID: "28a63e45-994f-41fe-82fb-605486d13b98") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.780157 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.780199 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.780217 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.780233 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.780245 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:38Z","lastTransitionTime":"2026-01-27T19:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.882851 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.882902 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.882915 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.882934 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.882948 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:38Z","lastTransitionTime":"2026-01-27T19:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.986432 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.986536 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.986554 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.986578 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:38 crc kubenswrapper[4776]: I0127 19:10:38.986596 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:38Z","lastTransitionTime":"2026-01-27T19:10:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.088864 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.088907 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.088921 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.088942 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.088956 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:39Z","lastTransitionTime":"2026-01-27T19:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.191874 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.192214 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.192240 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.192292 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.195432 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:39Z","lastTransitionTime":"2026-01-27T19:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.250976 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 02:27:50.07391998 +0000 UTC Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.298576 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.298642 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.298660 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.298725 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.298747 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:39Z","lastTransitionTime":"2026-01-27T19:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.319397 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.319403 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:39 crc kubenswrapper[4776]: E0127 19:10:39.319636 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:10:39 crc kubenswrapper[4776]: E0127 19:10:39.319991 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.401525 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.401579 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.401598 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.401624 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.401642 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:39Z","lastTransitionTime":"2026-01-27T19:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.504341 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.504378 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.504388 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.504402 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.504410 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:39Z","lastTransitionTime":"2026-01-27T19:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.607549 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.607614 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.607633 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.607661 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.607678 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:39Z","lastTransitionTime":"2026-01-27T19:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.709826 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.709863 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.709871 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.709884 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.709892 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:39Z","lastTransitionTime":"2026-01-27T19:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.712520 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs\") pod \"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:39 crc kubenswrapper[4776]: E0127 19:10:39.712731 4776 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:10:39 crc kubenswrapper[4776]: E0127 19:10:39.712913 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs podName:28a63e45-994f-41fe-82fb-605486d13b98 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:41.712877787 +0000 UTC m=+44.218482974 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs") pod "network-metrics-daemon-t7tbg" (UID: "28a63e45-994f-41fe-82fb-605486d13b98") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.812715 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.812827 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.812854 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.812883 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.812907 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:39Z","lastTransitionTime":"2026-01-27T19:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.916272 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.916339 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.916363 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.916393 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:39 crc kubenswrapper[4776]: I0127 19:10:39.916416 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:39Z","lastTransitionTime":"2026-01-27T19:10:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.019348 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.019389 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.019399 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.019415 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.019425 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:40Z","lastTransitionTime":"2026-01-27T19:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.122481 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.122538 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.122549 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.122575 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.122587 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:40Z","lastTransitionTime":"2026-01-27T19:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.225743 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.225828 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.225848 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.225875 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.225893 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:40Z","lastTransitionTime":"2026-01-27T19:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.251412 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 22:30:28.816979367 +0000 UTC Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.319158 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.319232 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:40 crc kubenswrapper[4776]: E0127 19:10:40.319324 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:40 crc kubenswrapper[4776]: E0127 19:10:40.319456 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.328880 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.328947 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.328971 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.329003 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.329025 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:40Z","lastTransitionTime":"2026-01-27T19:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.432332 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.432397 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.432420 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.432463 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.432485 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:40Z","lastTransitionTime":"2026-01-27T19:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.535142 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.535213 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.535222 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.535237 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.535246 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:40Z","lastTransitionTime":"2026-01-27T19:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.638106 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.638148 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.638157 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.638172 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.638181 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:40Z","lastTransitionTime":"2026-01-27T19:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.740739 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.740843 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.740867 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.740899 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.740920 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:40Z","lastTransitionTime":"2026-01-27T19:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.844104 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.844146 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.844156 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.844177 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.844186 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:40Z","lastTransitionTime":"2026-01-27T19:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.947099 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.947161 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.947185 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.947213 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:40 crc kubenswrapper[4776]: I0127 19:10:40.947239 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:40Z","lastTransitionTime":"2026-01-27T19:10:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.049918 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.049989 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.050014 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.050041 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.050062 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:41Z","lastTransitionTime":"2026-01-27T19:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.152343 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.152374 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.152383 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.152398 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.152406 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:41Z","lastTransitionTime":"2026-01-27T19:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.251983 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 06:44:46.98434653 +0000 UTC Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.255178 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.255230 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.255246 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.255267 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.255283 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:41Z","lastTransitionTime":"2026-01-27T19:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.319610 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.319726 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:41 crc kubenswrapper[4776]: E0127 19:10:41.319840 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:10:41 crc kubenswrapper[4776]: E0127 19:10:41.320067 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.358358 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.358424 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.358435 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.358456 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.358470 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:41Z","lastTransitionTime":"2026-01-27T19:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.460859 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.460902 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.460913 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.460930 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.460940 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:41Z","lastTransitionTime":"2026-01-27T19:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.563753 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.563806 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.563817 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.563831 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.563840 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:41Z","lastTransitionTime":"2026-01-27T19:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.666490 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.666547 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.666562 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.666584 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.666601 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:41Z","lastTransitionTime":"2026-01-27T19:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.736229 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs\") pod \"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:41 crc kubenswrapper[4776]: E0127 19:10:41.736526 4776 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:10:41 crc kubenswrapper[4776]: E0127 19:10:41.736666 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs podName:28a63e45-994f-41fe-82fb-605486d13b98 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:45.736630933 +0000 UTC m=+48.242236110 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs") pod "network-metrics-daemon-t7tbg" (UID: "28a63e45-994f-41fe-82fb-605486d13b98") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.769813 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.769856 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.769866 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.769881 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.769891 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:41Z","lastTransitionTime":"2026-01-27T19:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.873074 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.873119 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.873137 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.873159 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.873176 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:41Z","lastTransitionTime":"2026-01-27T19:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.976194 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.976280 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.976340 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.976377 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:41 crc kubenswrapper[4776]: I0127 19:10:41.976400 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:41Z","lastTransitionTime":"2026-01-27T19:10:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.079497 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.079544 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.079556 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.079572 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.079581 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:42Z","lastTransitionTime":"2026-01-27T19:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.182192 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.182227 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.182239 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.182255 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.182267 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:42Z","lastTransitionTime":"2026-01-27T19:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.252447 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 19:03:49.913031367 +0000 UTC Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.284633 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.284672 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.284681 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.284696 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.284705 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:42Z","lastTransitionTime":"2026-01-27T19:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.319152 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.319244 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:42 crc kubenswrapper[4776]: E0127 19:10:42.319344 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:42 crc kubenswrapper[4776]: E0127 19:10:42.319378 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.387252 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.387315 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.387331 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.387350 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.387364 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:42Z","lastTransitionTime":"2026-01-27T19:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.490329 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.490380 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.490396 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.490416 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.490430 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:42Z","lastTransitionTime":"2026-01-27T19:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.593359 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.593440 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.593463 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.593491 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.593509 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:42Z","lastTransitionTime":"2026-01-27T19:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.696302 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.696350 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.696362 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.696378 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.696390 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:42Z","lastTransitionTime":"2026-01-27T19:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.799874 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.799924 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.799938 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.799955 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.799967 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:42Z","lastTransitionTime":"2026-01-27T19:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.903005 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.903099 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.903129 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.903154 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:42 crc kubenswrapper[4776]: I0127 19:10:42.903175 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:42Z","lastTransitionTime":"2026-01-27T19:10:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.006754 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.006917 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.006943 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.007009 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.007042 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:43Z","lastTransitionTime":"2026-01-27T19:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.110806 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.110863 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.110882 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.110909 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.110928 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:43Z","lastTransitionTime":"2026-01-27T19:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.213672 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.213716 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.213727 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.213746 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.213795 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:43Z","lastTransitionTime":"2026-01-27T19:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.253519 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 04:19:13.203030456 +0000 UTC Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.316725 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.316789 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.316800 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.316816 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.316825 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:43Z","lastTransitionTime":"2026-01-27T19:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.318994 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.318994 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:43 crc kubenswrapper[4776]: E0127 19:10:43.319142 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:43 crc kubenswrapper[4776]: E0127 19:10:43.319227 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.418914 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.419000 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.419038 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.419068 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.419089 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:43Z","lastTransitionTime":"2026-01-27T19:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.521628 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.521671 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.521684 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.521698 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.521707 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:43Z","lastTransitionTime":"2026-01-27T19:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.624538 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.624618 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.624636 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.624662 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.624680 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:43Z","lastTransitionTime":"2026-01-27T19:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.727209 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.727268 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.727288 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.727311 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.727327 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:43Z","lastTransitionTime":"2026-01-27T19:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.830447 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.830493 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.830507 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.830523 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.830533 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:43Z","lastTransitionTime":"2026-01-27T19:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.932885 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.932954 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.932967 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.932983 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:43 crc kubenswrapper[4776]: I0127 19:10:43.932996 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:43Z","lastTransitionTime":"2026-01-27T19:10:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.035536 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.035582 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.035593 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.035613 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.035625 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:44Z","lastTransitionTime":"2026-01-27T19:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.138624 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.138670 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.138681 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.138696 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.138708 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:44Z","lastTransitionTime":"2026-01-27T19:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.241601 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.241731 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.241749 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.241814 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.241834 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:44Z","lastTransitionTime":"2026-01-27T19:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.253921 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 06:45:52.090606978 +0000 UTC Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.319526 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.319575 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:44 crc kubenswrapper[4776]: E0127 19:10:44.319727 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:44 crc kubenswrapper[4776]: E0127 19:10:44.319903 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.344579 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.344631 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.344649 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.344669 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.344686 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:44Z","lastTransitionTime":"2026-01-27T19:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.447396 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.447462 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.447496 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.447536 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.447561 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:44Z","lastTransitionTime":"2026-01-27T19:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.549902 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.549985 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.550009 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.550036 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.550057 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:44Z","lastTransitionTime":"2026-01-27T19:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.652671 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.652730 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.652743 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.652762 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.652807 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:44Z","lastTransitionTime":"2026-01-27T19:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.756293 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.756371 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.756396 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.756426 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.756448 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:44Z","lastTransitionTime":"2026-01-27T19:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.860553 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.860654 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.860680 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.860711 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.860734 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:44Z","lastTransitionTime":"2026-01-27T19:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.962948 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.963006 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.963025 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.963045 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:44 crc kubenswrapper[4776]: I0127 19:10:44.963058 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:44Z","lastTransitionTime":"2026-01-27T19:10:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.065529 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.065575 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.065588 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.065608 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.065631 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.125601 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.125685 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.125714 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.125745 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.125806 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: E0127 19:10:45.149337 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:45Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.154806 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.154885 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.154911 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.154944 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.154968 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: E0127 19:10:45.180170 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:45Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.185837 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.185884 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.185901 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.185921 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.185936 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: E0127 19:10:45.201663 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:45Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.205634 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.205700 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.205712 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.205730 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.205741 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: E0127 19:10:45.218848 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:45Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.223538 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.223589 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.223604 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.223628 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.223643 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: E0127 19:10:45.239364 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:45Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:45 crc kubenswrapper[4776]: E0127 19:10:45.239517 4776 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.241353 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.241413 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.241431 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.241457 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.241475 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.254445 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 12:09:35.149224342 +0000 UTC Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.319590 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.319615 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:45 crc kubenswrapper[4776]: E0127 19:10:45.319734 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:45 crc kubenswrapper[4776]: E0127 19:10:45.319983 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.343291 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.343334 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.343348 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.343365 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.343379 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.446342 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.446556 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.446589 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.446617 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.446635 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.549257 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.549306 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.549317 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.549334 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.549345 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.652506 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.652570 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.652583 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.652612 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.652627 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.755439 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.755485 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.755496 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.755513 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.755521 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.780097 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs\") pod \"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:45 crc kubenswrapper[4776]: E0127 19:10:45.780251 4776 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:10:45 crc kubenswrapper[4776]: E0127 19:10:45.780317 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs podName:28a63e45-994f-41fe-82fb-605486d13b98 nodeName:}" failed. No retries permitted until 2026-01-27 19:10:53.780300603 +0000 UTC m=+56.285905750 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs") pod "network-metrics-daemon-t7tbg" (UID: "28a63e45-994f-41fe-82fb-605486d13b98") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.860695 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.860896 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.860911 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.860926 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.860937 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.963700 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.963748 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.963764 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.963820 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:45 crc kubenswrapper[4776]: I0127 19:10:45.963836 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:45Z","lastTransitionTime":"2026-01-27T19:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.067098 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.068406 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.069076 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.069095 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.069105 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:46Z","lastTransitionTime":"2026-01-27T19:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.172820 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.172885 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.172901 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.172927 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.172941 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:46Z","lastTransitionTime":"2026-01-27T19:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.255218 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 20:34:41.567414589 +0000 UTC Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.275915 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.275984 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.276004 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.276029 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.276049 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:46Z","lastTransitionTime":"2026-01-27T19:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.319611 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.319720 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:46 crc kubenswrapper[4776]: E0127 19:10:46.319824 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:46 crc kubenswrapper[4776]: E0127 19:10:46.319885 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.379172 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.379237 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.379254 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.379278 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.379295 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:46Z","lastTransitionTime":"2026-01-27T19:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.482297 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.482371 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.482391 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.482414 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.482434 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:46Z","lastTransitionTime":"2026-01-27T19:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.585629 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.585742 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.585797 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.585838 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.585858 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:46Z","lastTransitionTime":"2026-01-27T19:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.687976 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.688057 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.688082 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.688113 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.688135 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:46Z","lastTransitionTime":"2026-01-27T19:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.790590 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.790656 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.790672 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.791322 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.791397 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:46Z","lastTransitionTime":"2026-01-27T19:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.895314 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.895368 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.895380 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.895398 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.895410 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:46Z","lastTransitionTime":"2026-01-27T19:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.998029 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.998076 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.998084 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.998098 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:46 crc kubenswrapper[4776]: I0127 19:10:46.998106 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:46Z","lastTransitionTime":"2026-01-27T19:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.100859 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.100956 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.100974 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.100999 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.101018 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:47Z","lastTransitionTime":"2026-01-27T19:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.204582 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.204638 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.204654 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.204677 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.204694 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:47Z","lastTransitionTime":"2026-01-27T19:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.255345 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 22:06:03.694788841 +0000 UTC Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.307740 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.307947 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.307973 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.307996 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.308012 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:47Z","lastTransitionTime":"2026-01-27T19:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.319195 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.319232 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:47 crc kubenswrapper[4776]: E0127 19:10:47.319350 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:47 crc kubenswrapper[4776]: E0127 19:10:47.319522 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.411316 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.411365 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.411379 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.411398 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.411411 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:47Z","lastTransitionTime":"2026-01-27T19:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.514546 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.514585 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.514598 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.514616 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.514628 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:47Z","lastTransitionTime":"2026-01-27T19:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.617204 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.617255 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.617285 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.617325 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.617349 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:47Z","lastTransitionTime":"2026-01-27T19:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.720497 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.720562 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.720576 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.720594 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.720607 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:47Z","lastTransitionTime":"2026-01-27T19:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.824230 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.824298 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.824312 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.824353 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.824367 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:47Z","lastTransitionTime":"2026-01-27T19:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.926415 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.926457 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.926467 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.926483 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:47 crc kubenswrapper[4776]: I0127 19:10:47.926494 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:47Z","lastTransitionTime":"2026-01-27T19:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.029386 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.029436 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.029447 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.029465 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.029478 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:48Z","lastTransitionTime":"2026-01-27T19:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.132727 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.132868 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.132900 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.132932 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.132955 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:48Z","lastTransitionTime":"2026-01-27T19:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.236625 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.236694 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.236717 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.236741 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.236759 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:48Z","lastTransitionTime":"2026-01-27T19:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.256047 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 17:52:57.393653197 +0000 UTC Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.318839 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:48 crc kubenswrapper[4776]: E0127 19:10:48.318969 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.319003 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:48 crc kubenswrapper[4776]: E0127 19:10:48.319084 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.335282 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.338685 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.338787 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.338865 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.338927 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.338984 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:48Z","lastTransitionTime":"2026-01-27T19:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.348217 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.360525 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.374354 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.387099 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.401374 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.415282 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.431680 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.445925 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.446295 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:48 crc 
kubenswrapper[4776]: I0127 19:10:48.446484 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.446634 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.446903 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:48Z","lastTransitionTime":"2026-01-27T19:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.447385 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:1
0:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.460645 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"
webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.470241 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.481011 4776 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 
19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.490961 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.503295 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.511321 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.527701 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\" Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.473823 6239 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.474127 6239 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 19:10:34.474414 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:10:34.474452 6239 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0127 19:10:34.474459 6239 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0127 19:10:34.474475 6239 handler.go:190] Sending 
*v1.NetworkPolicy event handler 4 for removal\\\\nI0127 19:10:34.474499 6239 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0127 19:10:34.474530 6239 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0127 19:10:34.474547 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0127 19:10:34.474554 6239 factory.go:656] Stopping watch factory\\\\nI0127 19:10:34.474556 6239 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0127 19:10:34.474568 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0127 19:10:34.474570 6239 ovnkube.go:599] Stopped ovnkube\\\\nI0127 19:10:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:48Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.549061 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.549281 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.549357 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.549425 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.549492 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:48Z","lastTransitionTime":"2026-01-27T19:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.651813 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.651887 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.651911 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.651940 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.651959 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:48Z","lastTransitionTime":"2026-01-27T19:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.754510 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.754575 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.754594 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.754621 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.754640 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:48Z","lastTransitionTime":"2026-01-27T19:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.857384 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.857449 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.857472 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.857503 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.857526 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:48Z","lastTransitionTime":"2026-01-27T19:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.960975 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.961051 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.961074 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.961120 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:48 crc kubenswrapper[4776]: I0127 19:10:48.961141 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:48Z","lastTransitionTime":"2026-01-27T19:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.064300 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.064375 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.064402 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.064434 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.064499 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:49Z","lastTransitionTime":"2026-01-27T19:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.168040 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.168099 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.168116 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.168139 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.168157 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:49Z","lastTransitionTime":"2026-01-27T19:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.257180 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 03:16:28.260358128 +0000 UTC Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.270577 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.270718 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.270743 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.270811 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.270833 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:49Z","lastTransitionTime":"2026-01-27T19:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.318908 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.318908 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:49 crc kubenswrapper[4776]: E0127 19:10:49.319104 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:49 crc kubenswrapper[4776]: E0127 19:10:49.319316 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.320140 4776 scope.go:117] "RemoveContainer" containerID="4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.373895 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.374211 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.374229 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.374254 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.374271 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:49Z","lastTransitionTime":"2026-01-27T19:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.477715 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.477832 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.477857 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.477887 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.477909 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:49Z","lastTransitionTime":"2026-01-27T19:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.580440 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.580475 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.580484 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.580498 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.580507 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:49Z","lastTransitionTime":"2026-01-27T19:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.650477 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovnkube-controller/1.log" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.653547 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.654588 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerStarted","Data":"72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375"} Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.655229 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.673866 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.690240 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.690329 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.690350 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.690431 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.690615 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:49Z","lastTransitionTime":"2026-01-27T19:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.690960 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.705325 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.722218 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.737553 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 
19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.757185 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.774718 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.785100 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.793224 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.793264 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.793278 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.793300 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.793315 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:49Z","lastTransitionTime":"2026-01-27T19:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.804466 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.817440 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.835229 4776 
status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.852360 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\" Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.473823 6239 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.474127 6239 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 19:10:34.474414 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:10:34.474452 6239 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0127 19:10:34.474459 6239 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0127 19:10:34.474475 6239 handler.go:190] Sending 
*v1.NetworkPolicy event handler 4 for removal\\\\nI0127 19:10:34.474499 6239 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0127 19:10:34.474530 6239 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0127 19:10:34.474547 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0127 19:10:34.474554 6239 factory.go:656] Stopping watch factory\\\\nI0127 19:10:34.474556 6239 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0127 19:10:34.474568 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0127 19:10:34.474570 6239 ovnkube.go:599] Stopped ovnkube\\\\nI0127 19:10:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\
":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.870351 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.894205 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.896092 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.896119 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.896132 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.896148 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.896161 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:49Z","lastTransitionTime":"2026-01-27T19:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.912072 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.925942 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:10:49 crc kubenswrapper[4776]: E0127 19:10:49.926161 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:11:21.926142353 +0000 UTC m=+84.431747500 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.933817 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.986224 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.995919 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.998050 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.998086 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.998104 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.998125 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:49 crc kubenswrapper[4776]: I0127 19:10:49.998140 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:49Z","lastTransitionTime":"2026-01-27T19:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.000752 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:49Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.016270 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountP
ath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.027341 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.027376 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.027397 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.027420 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.027539 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.027553 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.027563 4776 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.027597 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 19:11:22.02758607 +0000 UTC m=+84.533191217 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.027846 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.027861 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.027868 4776 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.027892 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 19:11:22.02788519 +0000 UTC m=+84.533490337 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.027920 4776 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.027938 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 19:11:22.027933661 +0000 UTC m=+84.533538808 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.027972 4776 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.027990 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 19:11:22.027984863 +0000 UTC m=+84.533590010 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.031830 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.044598 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.060929 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.079923 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.091048 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 
19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.100174 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.100207 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.100216 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.100231 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.100241 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:50Z","lastTransitionTime":"2026-01-27T19:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.103652 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.113874 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.126008 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.135598 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.145507 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.154702 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is 
after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.170992 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri
-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local 
retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\" Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.473823 6239 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.474127 6239 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 19:10:34.474414 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:10:34.474452 6239 handler.go:190] Sending *v1.Pod 
event handler 3 for removal\\\\nI0127 19:10:34.474459 6239 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0127 19:10:34.474475 6239 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0127 19:10:34.474499 6239 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0127 19:10:34.474530 6239 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0127 19:10:34.474547 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0127 19:10:34.474554 6239 factory.go:656] Stopping watch factory\\\\nI0127 19:10:34.474556 6239 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0127 19:10:34.474568 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0127 19:10:34.474570 6239 ovnkube.go:599] Stopped ovnkube\\\\nI0127 19:10:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\
\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.182039 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.190621 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.202462 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.202704 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.202826 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.202903 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.202968 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:50Z","lastTransitionTime":"2026-01-27T19:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.257751 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 02:19:59.338501687 +0000 UTC Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.306040 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.306076 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.306085 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.306099 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.306108 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:50Z","lastTransitionTime":"2026-01-27T19:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.319471 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.319614 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.319624 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.319687 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.408716 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.408759 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.408790 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.408806 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.408821 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:50Z","lastTransitionTime":"2026-01-27T19:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.512259 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.512310 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.512329 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.512352 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.512369 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:50Z","lastTransitionTime":"2026-01-27T19:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.614691 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.614842 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.614866 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.614898 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.614919 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:50Z","lastTransitionTime":"2026-01-27T19:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.664268 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovnkube-controller/2.log" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.665353 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovnkube-controller/1.log" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.669473 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.671036 4776 generic.go:334] "Generic (PLEG): container finished" podID="0acf82d2-cb56-42c0-9762-895319b4c26c" containerID="72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375" exitCode=1 Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.671154 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerDied","Data":"72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375"} Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.671251 4776 scope.go:117] "RemoveContainer" containerID="4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.671848 4776 scope.go:117] "RemoveContainer" containerID="72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375" Jan 27 19:10:50 crc kubenswrapper[4776]: E0127 19:10:50.672154 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" podUID="0acf82d2-cb56-42c0-9762-895319b4c26c" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.696367 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.716472 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30ebe3a9-0400-4b77-a684-d42aeab83b81\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d09799e92f789db0fd7077d34d99557d1f48b29ef6913525d73446dfac5942a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a76216648fe603e922028f39eb9efb9ff18864054115c123f8bcea132c3db026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3f5c587739451d2571c4484c3fd6e635b5f584254d4b810339940c77bf5ac8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.717046 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.717071 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.717083 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.717100 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.717112 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:50Z","lastTransitionTime":"2026-01-27T19:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.732437 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.747145 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.759885 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.775997 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.788685 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.802565 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-reg
eneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.812583 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.818911 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.818943 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.818953 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.818968 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.818979 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:50Z","lastTransitionTime":"2026-01-27T19:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.824135 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.834449 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.850376 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.863902 4776 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.877968 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.893131 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.903459 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.922388 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.922422 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.922432 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.922449 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.922460 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:50Z","lastTransitionTime":"2026-01-27T19:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:50 crc kubenswrapper[4776]: I0127 19:10:50.926585 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ 
start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4adfb78041909d8253dd7957ebd6f72940355eefc185d8988497ec0e6d8601d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:34Z\\\",\\\"message\\\":\\\" Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.473823 6239 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0127 19:10:34.474127 6239 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0127 19:10:34.474414 6239 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:10:34.474452 6239 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0127 19:10:34.474459 6239 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0127 19:10:34.474475 6239 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0127 19:10:34.474499 6239 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0127 19:10:34.474530 6239 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0127 19:10:34.474547 6239 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0127 19:10:34.474554 6239 factory.go:656] Stopping watch factory\\\\nI0127 19:10:34.474556 6239 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0127 19:10:34.474568 6239 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0127 19:10:34.474570 6239 ovnkube.go:599] Stopped ovnkube\\\\nI0127 19:10:3\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:50Z\\\",\\\"message\\\":\\\"50Z is after 2025-08-24T17:21:41Z]\\\\nI0127 19:10:50.283713 6466 services_controller.go:434] Service openshift-ingress-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-ingress-operator 06971ebf-e383-4ded-a48b-3b2e61be789e 4180 0 2025-02-23 05:12:34 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:ingress-operator] map[capability.openshift.io/name:Ingress include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006917d27 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: 
ingress-operator,},ClusterIP:10.217.5.244,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[
{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:50Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.024318 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.024350 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.024359 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.024371 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.024380 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:51Z","lastTransitionTime":"2026-01-27T19:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.127279 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.127307 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.127316 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.127328 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.127336 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:51Z","lastTransitionTime":"2026-01-27T19:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.230504 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.230973 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.231131 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.231297 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.231433 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:51Z","lastTransitionTime":"2026-01-27T19:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.259128 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 22:17:32.561547225 +0000 UTC Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.319212 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:51 crc kubenswrapper[4776]: E0127 19:10:51.319659 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.319216 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:51 crc kubenswrapper[4776]: E0127 19:10:51.320036 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.334143 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.334194 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.334218 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.334252 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.334272 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:51Z","lastTransitionTime":"2026-01-27T19:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.437025 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.437097 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.437119 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.437149 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.437172 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:51Z","lastTransitionTime":"2026-01-27T19:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.540244 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.540294 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.540315 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.540349 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.540363 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:51Z","lastTransitionTime":"2026-01-27T19:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.643074 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.643115 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.643126 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.643151 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.643163 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:51Z","lastTransitionTime":"2026-01-27T19:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.678191 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovnkube-controller/2.log" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.681031 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.682400 4776 scope.go:117] "RemoveContainer" containerID="72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375" Jan 27 19:10:51 crc kubenswrapper[4776]: E0127 19:10:51.682538 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" podUID="0acf82d2-cb56-42c0-9762-895319b4c26c" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.699263 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30ebe3a9-0400-4b77-a684-d42aeab83b81\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d09799e92f789db0fd7077d34d99557d1f48b29ef6913525d73446dfac5942a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a76216648fe603e922028f39eb9efb9ff18864054115c123f8bcea132c3db026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"st
ate\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3f5c587739451d2571c4484c3fd6e635b5f584254d4b810339940c77bf5ac8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.715499 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.732410 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.745403 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.745453 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.745470 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.745494 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.745510 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:51Z","lastTransitionTime":"2026-01-27T19:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.747086 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.764096 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sh
a256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.784161 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.802329 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.819482 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.835975 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.849830 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 
19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.852133 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.852183 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.852202 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.852225 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.852243 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:51Z","lastTransitionTime":"2026-01-27T19:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.863509 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"ru
nning\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.874650 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 
2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.891434 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.903156 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.916298 4776 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.927355 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.944706 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:50Z\\\",\\\"message\\\":\\\"50Z is after 2025-08-24T17:21:41Z]\\\\nI0127 19:10:50.283713 6466 services_controller.go:434] Service openshift-ingress-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-ingress-operator 06971ebf-e383-4ded-a48b-3b2e61be789e 4180 0 2025-02-23 05:12:34 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:ingress-operator] map[capability.openshift.io/name:Ingress include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 
service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006917d27 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: ingress-operator,},ClusterIP:10.217.5.244,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:51Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.954543 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.954821 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.954926 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.955037 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:51 crc kubenswrapper[4776]: I0127 19:10:51.955248 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:51Z","lastTransitionTime":"2026-01-27T19:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.057097 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.057125 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.057132 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.057146 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.057155 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:52Z","lastTransitionTime":"2026-01-27T19:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.159961 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.160308 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.160499 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.160660 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.160832 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:52Z","lastTransitionTime":"2026-01-27T19:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.260334 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 22:52:08.882881865 +0000 UTC Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.264228 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.264473 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.264626 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.264853 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.265086 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:52Z","lastTransitionTime":"2026-01-27T19:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.319193 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.319221 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:52 crc kubenswrapper[4776]: E0127 19:10:52.319370 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:52 crc kubenswrapper[4776]: E0127 19:10:52.319586 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.367517 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.367559 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.367567 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.367581 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.367589 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:52Z","lastTransitionTime":"2026-01-27T19:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.470956 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.471238 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.471364 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.471485 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.471645 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:52Z","lastTransitionTime":"2026-01-27T19:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.573728 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.573852 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.573876 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.573909 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.573932 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:52Z","lastTransitionTime":"2026-01-27T19:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.676281 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.676587 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.676685 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.676816 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.676902 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:52Z","lastTransitionTime":"2026-01-27T19:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.779656 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.779922 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.779999 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.780213 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.780290 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:52Z","lastTransitionTime":"2026-01-27T19:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.883007 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.883075 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.883098 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.883128 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.883153 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:52Z","lastTransitionTime":"2026-01-27T19:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.986510 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.986574 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.986592 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.986618 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:52 crc kubenswrapper[4776]: I0127 19:10:52.986635 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:52Z","lastTransitionTime":"2026-01-27T19:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.089953 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.090012 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.090031 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.090055 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.090075 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:53Z","lastTransitionTime":"2026-01-27T19:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.192418 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.192475 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.192490 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.192511 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.192526 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:53Z","lastTransitionTime":"2026-01-27T19:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.261161 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 09:35:14.472076563 +0000 UTC Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.294758 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.295178 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.295352 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.295526 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.295692 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:53Z","lastTransitionTime":"2026-01-27T19:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.319234 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:53 crc kubenswrapper[4776]: E0127 19:10:53.319371 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.319548 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:53 crc kubenswrapper[4776]: E0127 19:10:53.319877 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.397844 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.398105 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.398197 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.398289 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.398376 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:53Z","lastTransitionTime":"2026-01-27T19:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.500804 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.500879 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.500897 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.500919 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.500933 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:53Z","lastTransitionTime":"2026-01-27T19:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.604507 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.604535 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.604546 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.604561 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.604571 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:53Z","lastTransitionTime":"2026-01-27T19:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.707229 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.707311 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.707332 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.707356 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.707376 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:53Z","lastTransitionTime":"2026-01-27T19:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.810658 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.810702 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.810713 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.810730 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.810741 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:53Z","lastTransitionTime":"2026-01-27T19:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.868547 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs\") pod \"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:53 crc kubenswrapper[4776]: E0127 19:10:53.868681 4776 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:10:53 crc kubenswrapper[4776]: E0127 19:10:53.868741 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs podName:28a63e45-994f-41fe-82fb-605486d13b98 nodeName:}" failed. No retries permitted until 2026-01-27 19:11:09.868724509 +0000 UTC m=+72.374329666 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs") pod "network-metrics-daemon-t7tbg" (UID: "28a63e45-994f-41fe-82fb-605486d13b98") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.913085 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.913121 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.913133 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.913149 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:53 crc kubenswrapper[4776]: I0127 19:10:53.913160 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:53Z","lastTransitionTime":"2026-01-27T19:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.015017 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.015058 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.015068 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.015083 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.015092 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:54Z","lastTransitionTime":"2026-01-27T19:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.118317 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.118524 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.118614 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.118702 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.118791 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:54Z","lastTransitionTime":"2026-01-27T19:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.222009 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.222057 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.222074 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.222100 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.222117 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:54Z","lastTransitionTime":"2026-01-27T19:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.262465 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 08:06:09.999455708 +0000 UTC Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.319158 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:54 crc kubenswrapper[4776]: E0127 19:10:54.319312 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.319170 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:54 crc kubenswrapper[4776]: E0127 19:10:54.319459 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.325059 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.325165 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.325225 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.325284 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.325366 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:54Z","lastTransitionTime":"2026-01-27T19:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.427279 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.427346 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.427363 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.427388 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.427403 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:54Z","lastTransitionTime":"2026-01-27T19:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.530073 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.530110 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.530120 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.530133 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.530143 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:54Z","lastTransitionTime":"2026-01-27T19:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.633552 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.633798 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.633870 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.633956 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.634025 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:54Z","lastTransitionTime":"2026-01-27T19:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.736361 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.736616 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.736698 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.736787 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.736858 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:54Z","lastTransitionTime":"2026-01-27T19:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.838726 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.838985 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.839069 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.839155 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.839233 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:54Z","lastTransitionTime":"2026-01-27T19:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.942457 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.942493 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.942503 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.942525 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:54 crc kubenswrapper[4776]: I0127 19:10:54.942537 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:54Z","lastTransitionTime":"2026-01-27T19:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:55 crc kubenswrapper[4776]: I0127 19:10:55.045025 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:55 crc kubenswrapper[4776]: I0127 19:10:55.045061 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:55 crc kubenswrapper[4776]: I0127 19:10:55.045072 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:55 crc kubenswrapper[4776]: I0127 19:10:55.045090 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:55 crc kubenswrapper[4776]: I0127 19:10:55.045099 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:55Z","lastTransitionTime":"2026-01-27T19:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.084623 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 18:16:05.618064081 +0000 UTC Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.084791 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:56 crc kubenswrapper[4776]: E0127 19:10:56.084899 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.084951 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.085015 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.085103 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:56 crc kubenswrapper[4776]: E0127 19:10:56.085204 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:56 crc kubenswrapper[4776]: E0127 19:10:56.085280 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:56 crc kubenswrapper[4776]: E0127 19:10:56.085339 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.089731 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.090086 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.090100 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.090117 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.090128 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.091620 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.091651 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.091661 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.091673 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.091690 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: E0127 19:10:56.107628 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:56Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.112200 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.112248 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.112263 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.112283 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.112297 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: E0127 19:10:56.127545 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:56Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.131736 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.131793 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.131806 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.131823 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.131835 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: E0127 19:10:56.144213 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:56Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.149635 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.149687 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.149704 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.149725 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.149740 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: E0127 19:10:56.164332 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:56Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.169735 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.169939 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.170036 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.170125 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.170222 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: E0127 19:10:56.185967 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:56Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:56 crc kubenswrapper[4776]: E0127 19:10:56.186140 4776 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.192352 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.192546 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.192643 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.192812 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.192964 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.296707 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.296824 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.296879 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.296905 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.296953 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.399693 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.399733 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.399746 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.399784 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.399801 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.503267 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.503307 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.503318 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.503332 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.503343 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.606204 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.606271 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.606295 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.606335 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.606358 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.709567 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.709626 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.709643 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.709666 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.709687 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.812760 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.812890 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.812915 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.812948 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.812975 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.916455 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.916521 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.916534 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.916558 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:56 crc kubenswrapper[4776]: I0127 19:10:56.916574 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:56Z","lastTransitionTime":"2026-01-27T19:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.019238 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.019280 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.019292 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.019315 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.019326 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:57Z","lastTransitionTime":"2026-01-27T19:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.085345 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 18:19:05.072729384 +0000 UTC Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.122140 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.122193 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.122207 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.122229 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.122248 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:57Z","lastTransitionTime":"2026-01-27T19:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.224945 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.225003 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.225015 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.225041 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.225080 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:57Z","lastTransitionTime":"2026-01-27T19:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.319106 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.319106 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:57 crc kubenswrapper[4776]: E0127 19:10:57.319255 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:57 crc kubenswrapper[4776]: E0127 19:10:57.319383 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.327638 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.327679 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.327708 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.327733 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.327750 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:57Z","lastTransitionTime":"2026-01-27T19:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.431132 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.431211 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.431236 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.431267 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.431289 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:57Z","lastTransitionTime":"2026-01-27T19:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.534199 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.534246 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.534257 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.534274 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.534284 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:57Z","lastTransitionTime":"2026-01-27T19:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.637655 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.637708 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.637720 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.637736 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.637747 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:57Z","lastTransitionTime":"2026-01-27T19:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.740073 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.740126 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.740143 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.740166 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.740183 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:57Z","lastTransitionTime":"2026-01-27T19:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.843541 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.843631 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.843649 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.843701 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.843718 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:57Z","lastTransitionTime":"2026-01-27T19:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.946022 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.946061 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.946072 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.946088 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:57 crc kubenswrapper[4776]: I0127 19:10:57.946100 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:57Z","lastTransitionTime":"2026-01-27T19:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.049692 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.049738 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.049751 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.049786 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.049799 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:58Z","lastTransitionTime":"2026-01-27T19:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.086358 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 08:10:23.677387495 +0000 UTC Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.153624 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.153672 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.153684 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.153702 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.153714 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:58Z","lastTransitionTime":"2026-01-27T19:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.255750 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.255832 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.255855 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.255881 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.255901 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:58Z","lastTransitionTime":"2026-01-27T19:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.318913 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:10:58 crc kubenswrapper[4776]: E0127 19:10:58.319095 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.319163 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:10:58 crc kubenswrapper[4776]: E0127 19:10:58.319372 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.338914 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.353281 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30ebe3a9-0400-4b77-a684-d42aeab83b81\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d09799e92f789db0fd7077d34d99557d1f48b29ef6913525d73446dfac5942a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a76216648fe603e922028f39eb9efb9ff18864054115c123f8bcea132c3db026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPat
h\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3f5c587739451d2571c4484c3fd6e635b5f584254d4b810339940c77bf5ac8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.358490 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.358563 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.358577 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.358622 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.358640 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:58Z","lastTransitionTime":"2026-01-27T19:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.372621 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.391810 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.404982 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.424316 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.437365 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.451433 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-reg
eneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.463296 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.464573 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.464614 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.464623 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.464637 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.464647 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:58Z","lastTransitionTime":"2026-01-27T19:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.480210 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.491740 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.502136 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.514431 4776 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.529191 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.542503 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.553005 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.566280 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.566318 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.566329 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.566344 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.566356 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:58Z","lastTransitionTime":"2026-01-27T19:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.577486 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ 
start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:50Z\\\",\\\"message\\\":\\\"50Z is after 2025-08-24T17:21:41Z]\\\\nI0127 19:10:50.283713 6466 services_controller.go:434] Service openshift-ingress-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-ingress-operator 06971ebf-e383-4ded-a48b-3b2e61be789e 4180 0 2025-02-23 05:12:34 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:ingress-operator] 
map[capability.openshift.io/name:Ingress include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006917d27 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: ingress-operator,},ClusterIP:10.217.5.244,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae
8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:10:58Z is after 2025-08-24T17:21:41Z" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.668874 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.668943 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.668970 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.669001 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.669022 4776 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:58Z","lastTransitionTime":"2026-01-27T19:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.772272 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.772346 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.772365 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.772392 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.772411 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:58Z","lastTransitionTime":"2026-01-27T19:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.875439 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.875493 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.875517 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.875546 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.875572 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:58Z","lastTransitionTime":"2026-01-27T19:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.978257 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.978335 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.978359 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.978390 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:58 crc kubenswrapper[4776]: I0127 19:10:58.978413 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:58Z","lastTransitionTime":"2026-01-27T19:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.082308 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.082387 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.082407 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.082439 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.082476 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:59Z","lastTransitionTime":"2026-01-27T19:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.087422 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 17:49:50.506830053 +0000 UTC Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.186632 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.186695 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.186719 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.186752 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.186808 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:59Z","lastTransitionTime":"2026-01-27T19:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.289279 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.289350 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.289370 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.289395 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.289412 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:59Z","lastTransitionTime":"2026-01-27T19:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.318587 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.318606 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:10:59 crc kubenswrapper[4776]: E0127 19:10:59.318748 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:10:59 crc kubenswrapper[4776]: E0127 19:10:59.318925 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.392150 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.392203 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.392218 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.392236 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.392248 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:59Z","lastTransitionTime":"2026-01-27T19:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.495461 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.495525 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.495542 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.495565 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.495582 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:59Z","lastTransitionTime":"2026-01-27T19:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.597811 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.597872 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.597890 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.597916 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.597941 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:59Z","lastTransitionTime":"2026-01-27T19:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.700517 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.700564 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.700575 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.700591 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.700604 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:59Z","lastTransitionTime":"2026-01-27T19:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.803503 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.803572 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.803591 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.803617 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.803637 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:59Z","lastTransitionTime":"2026-01-27T19:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.906914 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.906984 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.907006 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.907036 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:10:59 crc kubenswrapper[4776]: I0127 19:10:59.907058 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:10:59Z","lastTransitionTime":"2026-01-27T19:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.010181 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.010232 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.010242 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.010256 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.010265 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:00Z","lastTransitionTime":"2026-01-27T19:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.088324 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 14:20:56.228725751 +0000 UTC Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.113207 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.113298 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.113317 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.113341 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.113358 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:00Z","lastTransitionTime":"2026-01-27T19:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.216086 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.216132 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.216143 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.216160 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.216171 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:00Z","lastTransitionTime":"2026-01-27T19:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.318293 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.318610 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.318619 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.318632 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.318642 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:00Z","lastTransitionTime":"2026-01-27T19:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.318874 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.318905 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:00 crc kubenswrapper[4776]: E0127 19:11:00.319015 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:00 crc kubenswrapper[4776]: E0127 19:11:00.319118 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.421875 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.421966 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.421986 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.422008 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.422025 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:00Z","lastTransitionTime":"2026-01-27T19:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.524662 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.524702 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.524711 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.524724 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.524735 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:00Z","lastTransitionTime":"2026-01-27T19:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.627465 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.627515 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.627531 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.627551 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.627565 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:00Z","lastTransitionTime":"2026-01-27T19:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.730208 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.730280 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.730296 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.730314 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.730327 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:00Z","lastTransitionTime":"2026-01-27T19:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.833559 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.833651 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.833663 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.833679 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.833690 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:00Z","lastTransitionTime":"2026-01-27T19:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.936352 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.936397 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.936410 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.936426 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:00 crc kubenswrapper[4776]: I0127 19:11:00.936440 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:00Z","lastTransitionTime":"2026-01-27T19:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.039547 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.039691 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.039717 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.039743 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.039762 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:01Z","lastTransitionTime":"2026-01-27T19:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.088693 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 18:25:05.822780818 +0000 UTC Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.143025 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.143087 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.143103 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.143128 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.143147 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:01Z","lastTransitionTime":"2026-01-27T19:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.246669 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.246735 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.246757 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.246814 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.246844 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:01Z","lastTransitionTime":"2026-01-27T19:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.318677 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.318677 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:01 crc kubenswrapper[4776]: E0127 19:11:01.318919 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:01 crc kubenswrapper[4776]: E0127 19:11:01.319123 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.351677 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.351828 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.351846 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.351904 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.351923 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:01Z","lastTransitionTime":"2026-01-27T19:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.454991 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.455035 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.455048 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.455064 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.455076 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:01Z","lastTransitionTime":"2026-01-27T19:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.557427 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.557465 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.557474 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.557487 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.557497 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:01Z","lastTransitionTime":"2026-01-27T19:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.660200 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.660261 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.660273 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.660290 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.660302 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:01Z","lastTransitionTime":"2026-01-27T19:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.763088 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.763145 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.763168 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.763196 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.763218 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:01Z","lastTransitionTime":"2026-01-27T19:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.865803 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.865853 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.865869 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.865887 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.865902 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:01Z","lastTransitionTime":"2026-01-27T19:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.968394 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.968436 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.968447 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.968465 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:01 crc kubenswrapper[4776]: I0127 19:11:01.968477 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:01Z","lastTransitionTime":"2026-01-27T19:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.070523 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.070557 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.070567 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.070581 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.070590 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:02Z","lastTransitionTime":"2026-01-27T19:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.089220 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 08:36:56.639386587 +0000 UTC Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.173050 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.173104 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.173116 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.173134 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.173146 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:02Z","lastTransitionTime":"2026-01-27T19:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.275340 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.275373 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.275386 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.275401 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.275411 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:02Z","lastTransitionTime":"2026-01-27T19:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.318631 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.318643 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:02 crc kubenswrapper[4776]: E0127 19:11:02.318871 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:02 crc kubenswrapper[4776]: E0127 19:11:02.319002 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.378109 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.378157 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.378168 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.378182 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.378191 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:02Z","lastTransitionTime":"2026-01-27T19:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.481176 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.481223 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.481235 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.481259 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.481272 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:02Z","lastTransitionTime":"2026-01-27T19:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.583358 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.583393 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.583400 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.583490 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.583507 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:02Z","lastTransitionTime":"2026-01-27T19:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.685859 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.685889 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.685897 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.685912 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.685932 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:02Z","lastTransitionTime":"2026-01-27T19:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.789032 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.789080 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.789095 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.789115 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.789130 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:02Z","lastTransitionTime":"2026-01-27T19:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.891587 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.891644 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.891658 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.891674 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.891686 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:02Z","lastTransitionTime":"2026-01-27T19:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.993417 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.993451 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.993468 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.993485 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:02 crc kubenswrapper[4776]: I0127 19:11:02.993496 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:02Z","lastTransitionTime":"2026-01-27T19:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.089688 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 17:29:26.971735354 +0000 UTC Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.095383 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.095423 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.095436 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.095454 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.095467 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:03Z","lastTransitionTime":"2026-01-27T19:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.197410 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.197450 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.197460 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.197476 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.197486 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:03Z","lastTransitionTime":"2026-01-27T19:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.299980 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.300015 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.300025 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.300040 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.300052 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:03Z","lastTransitionTime":"2026-01-27T19:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.318773 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.318910 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:03 crc kubenswrapper[4776]: E0127 19:11:03.319016 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:03 crc kubenswrapper[4776]: E0127 19:11:03.319086 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.402670 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.402702 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.402714 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.402734 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.402745 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:03Z","lastTransitionTime":"2026-01-27T19:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.505168 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.505218 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.505230 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.505246 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.505259 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:03Z","lastTransitionTime":"2026-01-27T19:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.607871 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.607907 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.607917 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.607931 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.607939 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:03Z","lastTransitionTime":"2026-01-27T19:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.709992 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.710046 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.710055 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.710069 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.710087 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:03Z","lastTransitionTime":"2026-01-27T19:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.812312 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.812369 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.812387 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.812410 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.812425 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:03Z","lastTransitionTime":"2026-01-27T19:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.916005 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.916054 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.916066 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.916084 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:03 crc kubenswrapper[4776]: I0127 19:11:03.916098 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:03Z","lastTransitionTime":"2026-01-27T19:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.018395 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.018466 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.018479 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.018495 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.018506 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:04Z","lastTransitionTime":"2026-01-27T19:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.090039 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 02:49:39.166270519 +0000 UTC Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.120349 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.120380 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.120391 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.120407 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.120421 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:04Z","lastTransitionTime":"2026-01-27T19:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.222420 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.222665 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.222751 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.222868 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.222957 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:04Z","lastTransitionTime":"2026-01-27T19:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.318618 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:04 crc kubenswrapper[4776]: E0127 19:11:04.318756 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.319509 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:04 crc kubenswrapper[4776]: E0127 19:11:04.319803 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.324591 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.324624 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.324632 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.324645 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.324673 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:04Z","lastTransitionTime":"2026-01-27T19:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.426604 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.426650 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.426662 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.426678 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.426691 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:04Z","lastTransitionTime":"2026-01-27T19:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.529035 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.529108 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.529120 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.529140 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.529151 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:04Z","lastTransitionTime":"2026-01-27T19:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.631283 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.631336 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.631346 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.631361 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.631371 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:04Z","lastTransitionTime":"2026-01-27T19:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.733667 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.733698 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.733708 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.733724 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.733733 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:04Z","lastTransitionTime":"2026-01-27T19:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.836198 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.836240 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.836258 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.836281 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.836297 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:04Z","lastTransitionTime":"2026-01-27T19:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.938423 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.938469 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.938481 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.938499 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:04 crc kubenswrapper[4776]: I0127 19:11:04.938509 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:04Z","lastTransitionTime":"2026-01-27T19:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.041135 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.041173 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.041183 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.041197 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.041207 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:05Z","lastTransitionTime":"2026-01-27T19:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.090450 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 11:58:16.336364745 +0000 UTC Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.143242 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.143277 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.143289 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.143306 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.143318 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:05Z","lastTransitionTime":"2026-01-27T19:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.245836 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.245881 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.245895 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.245913 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.245925 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:05Z","lastTransitionTime":"2026-01-27T19:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.318955 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:05 crc kubenswrapper[4776]: E0127 19:11:05.319102 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.319168 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:05 crc kubenswrapper[4776]: E0127 19:11:05.319652 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.320506 4776 scope.go:117] "RemoveContainer" containerID="72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375" Jan 27 19:11:05 crc kubenswrapper[4776]: E0127 19:11:05.320813 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" podUID="0acf82d2-cb56-42c0-9762-895319b4c26c" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.348154 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.348196 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.348208 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.348225 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.348258 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:05Z","lastTransitionTime":"2026-01-27T19:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.450953 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.450982 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.450990 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.451003 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.451012 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:05Z","lastTransitionTime":"2026-01-27T19:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.553802 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.554000 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.554020 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.554043 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.554055 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:05Z","lastTransitionTime":"2026-01-27T19:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.656017 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.656042 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.656050 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.656059 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.656067 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:05Z","lastTransitionTime":"2026-01-27T19:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.758153 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.758183 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.758191 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.758204 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.758212 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:05Z","lastTransitionTime":"2026-01-27T19:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.860690 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.860747 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.860786 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.860810 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.860829 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:05Z","lastTransitionTime":"2026-01-27T19:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.963764 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.963817 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.963829 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.963845 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:05 crc kubenswrapper[4776]: I0127 19:11:05.963856 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:05Z","lastTransitionTime":"2026-01-27T19:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.066495 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.066530 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.066539 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.066552 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.066560 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.091176 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 22:59:03.134726057 +0000 UTC Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.169417 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.169456 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.169467 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.169482 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.169492 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.222856 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.222883 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.222893 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.222907 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.222917 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: E0127 19:11:06.235394 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:06Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.238287 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.238312 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.238322 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.238335 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.238345 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: E0127 19:11:06.249423 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:06Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.252062 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.252083 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.252092 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.252103 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.252111 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: E0127 19:11:06.263288 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:06Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.265807 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.265930 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.266061 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.266136 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.266197 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: E0127 19:11:06.282445 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:06Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.285695 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.285757 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.285843 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.285882 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.285906 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: E0127 19:11:06.304842 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:06Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:06 crc kubenswrapper[4776]: E0127 19:11:06.304942 4776 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.306417 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.306440 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.306449 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.306462 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.306471 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.318855 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.318904 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:06 crc kubenswrapper[4776]: E0127 19:11:06.318976 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:06 crc kubenswrapper[4776]: E0127 19:11:06.319092 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.408982 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.409034 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.409049 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.409067 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.409083 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.512517 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.512571 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.512584 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.512605 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.512618 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.615846 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.615891 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.615904 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.615927 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.615942 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.718910 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.719400 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.719505 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.719607 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.719709 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.822248 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.822302 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.822314 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.822352 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.822366 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.924280 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.924315 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.924326 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.924343 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:06 crc kubenswrapper[4776]: I0127 19:11:06.924353 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:06Z","lastTransitionTime":"2026-01-27T19:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.026470 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.026539 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.026552 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.026571 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.026617 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:07Z","lastTransitionTime":"2026-01-27T19:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.092207 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 19:45:06.959053011 +0000 UTC Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.129053 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.129103 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.129118 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.129140 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.129152 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:07Z","lastTransitionTime":"2026-01-27T19:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.231434 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.231469 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.231479 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.231494 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.231503 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:07Z","lastTransitionTime":"2026-01-27T19:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.319535 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:07 crc kubenswrapper[4776]: E0127 19:11:07.319656 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.319547 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:07 crc kubenswrapper[4776]: E0127 19:11:07.319819 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.333945 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.334010 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.334031 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.334056 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.334076 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:07Z","lastTransitionTime":"2026-01-27T19:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.437193 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.437248 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.437259 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.437276 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.437290 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:07Z","lastTransitionTime":"2026-01-27T19:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.539581 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.539622 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.539630 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.539643 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.539652 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:07Z","lastTransitionTime":"2026-01-27T19:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.642247 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.642295 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.642305 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.642318 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.642330 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:07Z","lastTransitionTime":"2026-01-27T19:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.744596 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.744626 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.744634 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.744646 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.744655 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:07Z","lastTransitionTime":"2026-01-27T19:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.846786 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.846829 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.846841 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.846858 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.846870 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:07Z","lastTransitionTime":"2026-01-27T19:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.949502 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.949544 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.949552 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.949566 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:07 crc kubenswrapper[4776]: I0127 19:11:07.949574 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:07Z","lastTransitionTime":"2026-01-27T19:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.051241 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.051289 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.051300 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.051316 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.051328 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:08Z","lastTransitionTime":"2026-01-27T19:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.092979 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 06:24:52.809124231 +0000 UTC Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.153473 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.153518 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.153527 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.153542 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.153552 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:08Z","lastTransitionTime":"2026-01-27T19:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.255613 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.255655 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.255668 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.255685 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.255697 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:08Z","lastTransitionTime":"2026-01-27T19:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.318689 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:08 crc kubenswrapper[4776]: E0127 19:11:08.318806 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.318886 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:08 crc kubenswrapper[4776]: E0127 19:11:08.319034 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.333272 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.343084 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.357876 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.357907 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.357918 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.357935 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.357945 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:08Z","lastTransitionTime":"2026-01-27T19:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.362474 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:50Z\\\",\\\"message\\\":\\\"50Z is after 2025-08-24T17:21:41Z]\\\\nI0127 19:10:50.283713 6466 services_controller.go:434] Service openshift-ingress-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-ingress-operator 06971ebf-e383-4ded-a48b-3b2e61be789e 4180 0 2025-02-23 05:12:34 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:ingress-operator] map[capability.openshift.io/name:Ingress include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 
service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006917d27 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: ingress-operator,},ClusterIP:10.217.5.244,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.377558 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30ebe3a9-0400-4b77-a684-d42aeab83b81\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d09799e92f789db0fd7077d34d99557d1f48b29ef6913525d73446dfac5942a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a76216648fe603e922028f39eb9efb9ff18864054115c123f8bcea132c3db026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3f5c587739451d2571c4484c3fd6e635b5f584254d4b810339940c77bf5ac8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.390820 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 
2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.402313 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.415553 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.427669 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.438223 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.447717 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.456903 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.460461 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.460494 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.460502 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.460515 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.460524 4776 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:08Z","lastTransitionTime":"2026-01-27T19:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.468957 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.478579 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.489814 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.499681 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.509926 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.520791 4776 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:08Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.563152 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.563186 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 
19:11:08.563194 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.563208 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.563216 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:08Z","lastTransitionTime":"2026-01-27T19:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.665013 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.665054 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.665066 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.665081 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.665092 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:08Z","lastTransitionTime":"2026-01-27T19:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.767309 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.767375 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.767421 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.767445 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.767463 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:08Z","lastTransitionTime":"2026-01-27T19:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.869604 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.869652 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.869667 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.869687 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.869697 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:08Z","lastTransitionTime":"2026-01-27T19:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.972444 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.972487 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.972496 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.972511 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:08 crc kubenswrapper[4776]: I0127 19:11:08.972521 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:08Z","lastTransitionTime":"2026-01-27T19:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.075093 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.075128 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.075140 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.075155 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.075166 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:09Z","lastTransitionTime":"2026-01-27T19:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.093375 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 04:11:31.731870245 +0000 UTC Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.177330 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.177376 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.177388 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.177402 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.177414 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:09Z","lastTransitionTime":"2026-01-27T19:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.280116 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.280159 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.280168 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.280184 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.280194 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:09Z","lastTransitionTime":"2026-01-27T19:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.319164 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.319206 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:09 crc kubenswrapper[4776]: E0127 19:11:09.319262 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:09 crc kubenswrapper[4776]: E0127 19:11:09.319428 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.382701 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.382755 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.382792 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.382814 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.382830 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:09Z","lastTransitionTime":"2026-01-27T19:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.486012 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.486057 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.486066 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.486081 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.486091 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:09Z","lastTransitionTime":"2026-01-27T19:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.588613 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.588650 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.588659 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.588672 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.588684 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:09Z","lastTransitionTime":"2026-01-27T19:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.691272 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.691317 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.691328 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.691347 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.691359 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:09Z","lastTransitionTime":"2026-01-27T19:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.793407 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.793452 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.793462 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.793476 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.793485 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:09Z","lastTransitionTime":"2026-01-27T19:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.895403 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.895442 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.895454 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.895469 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.895481 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:09Z","lastTransitionTime":"2026-01-27T19:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.922706 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs\") pod \"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:09 crc kubenswrapper[4776]: E0127 19:11:09.922894 4776 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:11:09 crc kubenswrapper[4776]: E0127 19:11:09.922964 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs podName:28a63e45-994f-41fe-82fb-605486d13b98 nodeName:}" failed. No retries permitted until 2026-01-27 19:11:41.922945475 +0000 UTC m=+104.428550652 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs") pod "network-metrics-daemon-t7tbg" (UID: "28a63e45-994f-41fe-82fb-605486d13b98") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.997744 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.997787 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.997796 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.997809 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:09 crc kubenswrapper[4776]: I0127 19:11:09.997819 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:09Z","lastTransitionTime":"2026-01-27T19:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.093846 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 18:37:53.890830131 +0000 UTC Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.100804 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.100848 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.100860 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.100885 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.100899 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:10Z","lastTransitionTime":"2026-01-27T19:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.203825 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.203875 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.203887 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.203905 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.203939 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:10Z","lastTransitionTime":"2026-01-27T19:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.306403 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.306461 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.306480 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.306504 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.306524 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:10Z","lastTransitionTime":"2026-01-27T19:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.318807 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.318862 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:10 crc kubenswrapper[4776]: E0127 19:11:10.318945 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:10 crc kubenswrapper[4776]: E0127 19:11:10.319144 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.408405 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.408983 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.409078 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.409168 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.409251 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:10Z","lastTransitionTime":"2026-01-27T19:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.511702 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.511743 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.511752 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.511781 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.511791 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:10Z","lastTransitionTime":"2026-01-27T19:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.614329 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.614369 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.614378 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.614393 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.614402 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:10Z","lastTransitionTime":"2026-01-27T19:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.716975 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.717048 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.717067 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.717093 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.717112 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:10Z","lastTransitionTime":"2026-01-27T19:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.819833 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.819892 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.819914 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.819944 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.819968 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:10Z","lastTransitionTime":"2026-01-27T19:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.922897 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.922970 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.922992 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.923018 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:10 crc kubenswrapper[4776]: I0127 19:11:10.923040 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:10Z","lastTransitionTime":"2026-01-27T19:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.025447 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.025495 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.025504 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.025518 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.025527 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:11Z","lastTransitionTime":"2026-01-27T19:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.095039 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 18:13:26.863346566 +0000 UTC Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.128034 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.128100 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.128117 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.128145 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.128162 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:11Z","lastTransitionTime":"2026-01-27T19:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.230627 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.230685 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.230701 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.230725 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.230743 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:11Z","lastTransitionTime":"2026-01-27T19:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.319152 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.319160 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:11 crc kubenswrapper[4776]: E0127 19:11:11.319360 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:11 crc kubenswrapper[4776]: E0127 19:11:11.319485 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.332929 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.332975 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.332996 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.333024 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.333046 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:11Z","lastTransitionTime":"2026-01-27T19:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.435706 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.435742 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.435753 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.435785 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.435799 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:11Z","lastTransitionTime":"2026-01-27T19:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.537839 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.537895 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.537904 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.537919 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.537931 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:11Z","lastTransitionTime":"2026-01-27T19:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.640464 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.640507 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.640521 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.640537 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.640550 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:11Z","lastTransitionTime":"2026-01-27T19:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.742615 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.742661 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.742677 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.742695 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.742707 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:11Z","lastTransitionTime":"2026-01-27T19:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.844760 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.844831 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.844847 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.844867 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.844882 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:11Z","lastTransitionTime":"2026-01-27T19:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.947618 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.947676 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.947692 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.947716 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:11 crc kubenswrapper[4776]: I0127 19:11:11.947732 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:11Z","lastTransitionTime":"2026-01-27T19:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.050139 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.050187 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.050206 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.050230 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.050247 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:12Z","lastTransitionTime":"2026-01-27T19:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.095804 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 09:20:15.090454475 +0000 UTC Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.138884 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wmmtr_255485b5-2151-4328-b925-c2e1a1f95e1f/kube-multus/0.log" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.138937 4776 generic.go:334] "Generic (PLEG): container finished" podID="255485b5-2151-4328-b925-c2e1a1f95e1f" containerID="da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad" exitCode=1 Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.138970 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wmmtr" event={"ID":"255485b5-2151-4328-b925-c2e1a1f95e1f","Type":"ContainerDied","Data":"da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad"} Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.139306 4776 scope.go:117] "RemoveContainer" containerID="da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.152667 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.152723 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.152740 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.152763 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.152810 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:12Z","lastTransitionTime":"2026-01-27T19:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.155175 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30ebe3a9-0400-4b77-a684-d42aeab83b81\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d09799e92f789db0fd7077d34d99557d1f48b29ef6913525d73446dfac5942a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a76216648fe603e922028f39eb9efb9ff18864054115c123f8bcea132c3db026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3f5c587739451d2571c4484c3fd6e635b5f584254d4b810339940c77bf5ac8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.171207 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.185495 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.200854 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:12Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:11:11Z\\\",\\\"message\\\":\\\"2026-01-27T19:10:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a8fc56b8-e7e3-484d-9160-1a91bfd6bab5\\\\n2026-01-27T19:10:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a8fc56b8-e7e3-484d-9160-1a91bfd6bab5 to /host/opt/cni/bin/\\\\n2026-01-27T19:10:26Z [verbose] multus-daemon started\\\\n2026-01-27T19:10:26Z [verbose] Readiness Indicator file check\\\\n2026-01-27T19:11:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.213150 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.224494 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.234450 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.244828 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.254723 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.254756 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.254782 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.254800 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.254811 4776 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:12Z","lastTransitionTime":"2026-01-27T19:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.256829 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.265644 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.275014 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.283569 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.291935 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.299542 4776 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.307615 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.314677 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.320977 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:12 crc kubenswrapper[4776]: E0127 19:11:12.321072 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.321221 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:12 crc kubenswrapper[4776]: E0127 19:11:12.321285 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.329001 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:50Z\\\",\\\"message\\\":\\\"50Z is after 2025-08-24T17:21:41Z]\\\\nI0127 19:10:50.283713 6466 services_controller.go:434] Service openshift-ingress-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-ingress-operator 06971ebf-e383-4ded-a48b-3b2e61be789e 4180 0 2025-02-23 05:12:34 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:ingress-operator] map[capability.openshift.io/name:Ingress include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 
service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006917d27 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: ingress-operator,},ClusterIP:10.217.5.244,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:12Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.357214 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.357244 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.357254 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.357269 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.357280 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:12Z","lastTransitionTime":"2026-01-27T19:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.459081 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.459115 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.459127 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.459143 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.459153 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:12Z","lastTransitionTime":"2026-01-27T19:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.561625 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.561663 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.561674 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.561694 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.561707 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:12Z","lastTransitionTime":"2026-01-27T19:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.663336 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.663368 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.663376 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.663415 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.663427 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:12Z","lastTransitionTime":"2026-01-27T19:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.766656 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.767060 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.767493 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.767682 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.767900 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:12Z","lastTransitionTime":"2026-01-27T19:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.871832 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.872480 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.872648 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.872837 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.872975 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:12Z","lastTransitionTime":"2026-01-27T19:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.975684 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.975921 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.976101 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.976188 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:12 crc kubenswrapper[4776]: I0127 19:11:12.976259 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:12Z","lastTransitionTime":"2026-01-27T19:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.078818 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.079041 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.079097 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.079159 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.079214 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:13Z","lastTransitionTime":"2026-01-27T19:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.095883 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 07:34:11.461663017 +0000 UTC Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.143918 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wmmtr_255485b5-2151-4328-b925-c2e1a1f95e1f/kube-multus/0.log" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.144175 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wmmtr" event={"ID":"255485b5-2151-4328-b925-c2e1a1f95e1f","Type":"ContainerStarted","Data":"60a9dedcb5b133aa5a67d6872908586c519e49937c1f38902edff10687f01974"} Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.156016 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60a9dedcb5b133aa5a67d6872908586c519e49937c1f38902edff10687f01974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:11:11Z\\\",\\\"message\\\":\\\"2026-01-27T19:10:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a8fc56b8-e7e3-484d-9160-1a91bfd6bab5\\\\n2026-01-27T19:10:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a8fc56b8-e7e3-484d-9160-1a91bfd6bab5 to /host/opt/cni/bin/\\\\n2026-01-27T19:10:26Z [verbose] multus-daemon started\\\\n2026-01-27T19:10:26Z [verbose] Readiness Indicator file check\\\\n2026-01-27T19:11:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.166453 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30ebe3a9-0400-4b77-a684-d42aeab83b81\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d09799e92f789db0fd7077d34d99557d1f48b29ef6913525d73446dfac5942a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a76216648fe603e922028f39eb9efb9ff18864054115c123f8bcea132c3db026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3f5c587739451d2571c4484c3fd6e635b5f584254d4b810339940c77bf5ac8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.182049 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 
2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.182117 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.182810 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.182972 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.183099 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.183215 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:13Z","lastTransitionTime":"2026-01-27T19:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.195681 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.209780 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.226706 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.240817 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.261088 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-reg
eneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.273319 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.283176 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.285753 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.285793 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.285811 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.285828 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.285854 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:13Z","lastTransitionTime":"2026-01-27T19:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.295407 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19
:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.309916 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",
\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.319541 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:13 crc kubenswrapper[4776]: E0127 19:11:13.319885 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.320267 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:13 crc kubenswrapper[4776]: E0127 19:11:13.320504 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.320314 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.335809 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.349451 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.362794 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.383801 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:50Z\\\",\\\"message\\\":\\\"50Z is after 2025-08-24T17:21:41Z]\\\\nI0127 19:10:50.283713 6466 services_controller.go:434] Service openshift-ingress-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-ingress-operator 06971ebf-e383-4ded-a48b-3b2e61be789e 4180 0 2025-02-23 05:12:34 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:ingress-operator] map[capability.openshift.io/name:Ingress include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 
service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006917d27 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: ingress-operator,},ClusterIP:10.217.5.244,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:13Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.388336 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.388379 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.388388 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.388403 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.388413 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:13Z","lastTransitionTime":"2026-01-27T19:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.490953 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.491874 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.492096 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.492337 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.492847 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:13Z","lastTransitionTime":"2026-01-27T19:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.595932 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.595990 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.596008 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.596030 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.596051 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:13Z","lastTransitionTime":"2026-01-27T19:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.699101 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.699166 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.699193 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.699218 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.699234 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:13Z","lastTransitionTime":"2026-01-27T19:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.802703 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.802759 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.802808 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.802835 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.802856 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:13Z","lastTransitionTime":"2026-01-27T19:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.905647 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.905958 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.906001 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.906031 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:13 crc kubenswrapper[4776]: I0127 19:11:13.906046 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:13Z","lastTransitionTime":"2026-01-27T19:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.008387 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.008442 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.008460 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.008482 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.008499 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:14Z","lastTransitionTime":"2026-01-27T19:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.096127 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 18:33:38.30969754 +0000 UTC Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.111554 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.111599 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.111613 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.111632 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.111645 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:14Z","lastTransitionTime":"2026-01-27T19:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.214677 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.214726 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.214742 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.214765 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.214888 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:14Z","lastTransitionTime":"2026-01-27T19:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.317726 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.317822 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.317848 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.317878 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.317900 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:14Z","lastTransitionTime":"2026-01-27T19:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.319019 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:14 crc kubenswrapper[4776]: E0127 19:11:14.319202 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.319534 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:14 crc kubenswrapper[4776]: E0127 19:11:14.319675 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.420146 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.420178 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.420186 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.420199 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.420210 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:14Z","lastTransitionTime":"2026-01-27T19:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.522195 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.522238 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.522252 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.522267 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.522276 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:14Z","lastTransitionTime":"2026-01-27T19:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.624163 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.624219 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.624232 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.624248 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.624260 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:14Z","lastTransitionTime":"2026-01-27T19:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.727352 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.727413 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.727433 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.727457 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.727474 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:14Z","lastTransitionTime":"2026-01-27T19:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.831413 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.831461 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.831472 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.831489 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.831500 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:14Z","lastTransitionTime":"2026-01-27T19:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.934452 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.934501 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.934516 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.934535 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:14 crc kubenswrapper[4776]: I0127 19:11:14.934547 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:14Z","lastTransitionTime":"2026-01-27T19:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.037577 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.037617 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.037626 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.037641 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.037650 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:15Z","lastTransitionTime":"2026-01-27T19:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.097353 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 19:15:51.962830684 +0000 UTC Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.139970 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.140006 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.140015 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.140027 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.140036 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:15Z","lastTransitionTime":"2026-01-27T19:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.242733 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.242839 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.242862 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.242889 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.242911 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:15Z","lastTransitionTime":"2026-01-27T19:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.319399 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:15 crc kubenswrapper[4776]: E0127 19:11:15.319517 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.319400 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:15 crc kubenswrapper[4776]: E0127 19:11:15.319610 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.344692 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.344721 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.344742 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.344753 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.344788 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:15Z","lastTransitionTime":"2026-01-27T19:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.447598 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.447668 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.447692 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.447717 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.447736 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:15Z","lastTransitionTime":"2026-01-27T19:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.550224 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.550261 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.550270 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.550285 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.550296 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:15Z","lastTransitionTime":"2026-01-27T19:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.653100 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.653145 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.653159 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.653176 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.653189 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:15Z","lastTransitionTime":"2026-01-27T19:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.756493 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.756553 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.756563 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.756580 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.756591 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:15Z","lastTransitionTime":"2026-01-27T19:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.860372 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.860443 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.860467 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.860500 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.860526 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:15Z","lastTransitionTime":"2026-01-27T19:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.963646 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.963716 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.963736 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.963761 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:15 crc kubenswrapper[4776]: I0127 19:11:15.963807 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:15Z","lastTransitionTime":"2026-01-27T19:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.066820 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.066870 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.066885 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.066907 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.066924 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.098165 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 03:00:02.862240391 +0000 UTC Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.168745 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.169251 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.169264 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.169280 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.169291 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.271339 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.271624 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.271701 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.271800 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.271892 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.319376 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:16 crc kubenswrapper[4776]: E0127 19:11:16.319606 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.319745 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:16 crc kubenswrapper[4776]: E0127 19:11:16.320430 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.320920 4776 scope.go:117] "RemoveContainer" containerID="72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.376612 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.376677 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.376697 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.376728 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.376744 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.479865 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.479896 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.479906 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.479923 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.479936 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.575696 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.575742 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.575752 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.575787 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.575802 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: E0127 19:11:16.588177 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:16Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.594047 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.594079 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.594090 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.594106 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.594114 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: E0127 19:11:16.608909 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:16Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.611856 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.611893 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.611907 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.611924 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.611936 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: E0127 19:11:16.628943 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:16Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.631375 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.631432 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.631443 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.631461 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.631475 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: E0127 19:11:16.647605 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:16Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.651392 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.651430 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.651439 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.651452 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.651460 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: E0127 19:11:16.665226 4776 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"0ffe3ef6-9521-4905-b9d5-2ecfe054a2e9\\\",\\\"systemUUID\\\":\\\"9a4ff058-5a9d-4bfd-8fc3-20b41763605e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:16Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:16 crc kubenswrapper[4776]: E0127 19:11:16.665405 4776 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.666888 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.666929 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.666945 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.666964 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.666976 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.770037 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.770093 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.770105 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.770124 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.770137 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.872301 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.872333 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.872343 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.872356 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.872365 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.975052 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.975094 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.975105 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.975123 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:16 crc kubenswrapper[4776]: I0127 19:11:16.975134 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:16Z","lastTransitionTime":"2026-01-27T19:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.077279 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.077328 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.077341 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.077358 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.077371 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:17Z","lastTransitionTime":"2026-01-27T19:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.098529 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 21:57:09.111607497 +0000 UTC Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.160187 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovnkube-controller/2.log" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.162682 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.163538 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerStarted","Data":"ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4"} Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.163991 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.179686 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.179729 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.179740 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.179757 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.179785 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:17Z","lastTransitionTime":"2026-01-27T19:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.192592 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.210301 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.236147 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:50Z\\\",\\\"message\\\":\\\"50Z is after 2025-08-24T17:21:41Z]\\\\nI0127 19:10:50.283713 6466 services_controller.go:434] Service openshift-ingress-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-ingress-operator 06971ebf-e383-4ded-a48b-3b2e61be789e 4180 0 2025-02-23 05:12:34 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:ingress-operator] map[capability.openshift.io/name:Ingress include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 
service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006917d27 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: ingress-operator,},ClusterIP:10.217.5.244,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-0
1-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.248538 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30ebe3a9-0400-4b77-a684-d42aeab83b81\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d09799e92f789db0fd7077d34d99557d1f48b29ef6913525d73446dfac5942a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a76216648fe603e922028f39eb9efb9ff18864054115c123f8bcea132c3db026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3f5c587739451d2571c4484c3fd6e635b5f584254d4b810339940c77bf5ac8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.259723 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 
2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.277730 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.285476 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.285521 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.285534 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.285550 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.285567 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:17Z","lastTransitionTime":"2026-01-27T19:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.300190 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60a9dedcb5b133aa5a67d6872908586c519e49937c1f38902edff10687f01974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:11:11Z\\\",\\\"message\\\":\\\"2026-01-27T19:10:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a8fc56b8-e7e3-484d-9160-1a91bfd6bab5\\\\n2026-01-27T19:10:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a8fc56b8-e7e3-484d-9160-1a91bfd6bab5 to /host/opt/cni/bin/\\\\n2026-01-27T19:10:26Z [verbose] multus-daemon started\\\\n2026-01-27T19:10:26Z [verbose] Readiness Indicator file check\\\\n2026-01-27T19:11:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.319255 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.319263 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:17 crc kubenswrapper[4776]: E0127 19:11:17.319381 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:17 crc kubenswrapper[4776]: E0127 19:11:17.319461 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.321647 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1
ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.334413 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.346223 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.359482 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.372445 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.382214 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 
19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.387429 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.387485 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.387499 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.387517 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.387529 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:17Z","lastTransitionTime":"2026-01-27T19:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.395930 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"ru
nning\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.405544 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 
2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.416414 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.427292 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:17Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.490042 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.490107 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.490126 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.490150 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.490168 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:17Z","lastTransitionTime":"2026-01-27T19:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.592087 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.592115 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.592126 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.592142 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.592154 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:17Z","lastTransitionTime":"2026-01-27T19:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.694732 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.694846 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.694885 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.694913 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.694930 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:17Z","lastTransitionTime":"2026-01-27T19:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.797006 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.797057 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.797069 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.797087 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.797099 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:17Z","lastTransitionTime":"2026-01-27T19:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.899470 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.899526 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.899549 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.899579 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:17 crc kubenswrapper[4776]: I0127 19:11:17.899599 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:17Z","lastTransitionTime":"2026-01-27T19:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.002391 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.002444 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.002452 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.002475 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.002520 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:18Z","lastTransitionTime":"2026-01-27T19:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.099571 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 17:22:30.776799009 +0000 UTC Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.106189 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.106251 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.106269 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.106291 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.106309 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:18Z","lastTransitionTime":"2026-01-27T19:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.170106 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovnkube-controller/3.log" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.171143 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovnkube-controller/2.log" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.174851 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.176381 4776 generic.go:334] "Generic (PLEG): container finished" podID="0acf82d2-cb56-42c0-9762-895319b4c26c" containerID="ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4" exitCode=1 Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.176420 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerDied","Data":"ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4"} Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.176468 4776 scope.go:117] "RemoveContainer" containerID="72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.178225 4776 scope.go:117] "RemoveContainer" containerID="ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4" Jan 27 19:11:18 crc kubenswrapper[4776]: E0127 19:11:18.178572 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" podUID="0acf82d2-cb56-42c0-9762-895319b4c26c" Jan 27 
19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.196899 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30ebe3a9-0400-4b77-a684-d42aeab83b81\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d09799e92f789db0fd7077d34d99557d1f48b29ef6913525d73446dfac5942a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a76216648fe603e922028f39eb9efb9ff18864054115c123f8bcea132c3db026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3f5c587739451d2571c4484c3fd6e635b5f584254d4b810339940c77bf5ac8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.208874 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.208903 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.208915 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.208930 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.208940 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:18Z","lastTransitionTime":"2026-01-27T19:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.213288 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.228505 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.243013 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60a9dedcb5b133aa5a67d6872908586c519e49937c1f38902edff10687f01974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:11:11Z\\\",\\\"message\\\":\\\"2026-01-27T19:10:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a8fc56b8-e7e3-484d-9160-1a91bfd6bab5\\\\n2026-01-27T19:10:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a8fc56b8-e7e3-484d-9160-1a91bfd6bab5 to /host/opt/cni/bin/\\\\n2026-01-27T19:10:26Z [verbose] 
multus-daemon started\\\\n2026-01-27T19:10:26Z [verbose] Readiness Indicator file check\\\\n2026-01-27T19:11:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.258286 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 
19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.274869 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.289574 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.302030 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.312052 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.312082 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.312093 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.312117 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.312126 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:18Z","lastTransitionTime":"2026-01-27T19:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.313207 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.319034 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.319048 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:18 crc kubenswrapper[4776]: E0127 19:11:18.319202 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:18 crc kubenswrapper[4776]: E0127 19:11:18.319310 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.328412 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mount
Path\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.342897 
4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.355168 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.365463 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.379218 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.395067 4776 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.409953 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.413733 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.413789 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.413802 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.413817 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.413829 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:18Z","lastTransitionTime":"2026-01-27T19:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.427447 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ 
start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:50Z\\\",\\\"message\\\":\\\"50Z is after 2025-08-24T17:21:41Z]\\\\nI0127 19:10:50.283713 6466 services_controller.go:434] Service openshift-ingress-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-ingress-operator 06971ebf-e383-4ded-a48b-3b2e61be789e 4180 0 2025-02-23 05:12:34 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:ingress-operator] 
map[capability.openshift.io/name:Ingress include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006917d27 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: ingress-operator,},ClusterIP:10.217.5.244,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:11:17Z\\\",\\\"message\\\":\\\"t/informers/externalversions/factory.go:141\\\\nI0127 19:11:17.296687 6870 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0127 19:11:17.297459 6870 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0127 19:11:17.297483 6870 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0127 19:11:17.297508 6870 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0127 19:11:17.297514 6870 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0127 19:11:17.297524 6870 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0127 19:11:17.297558 6870 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0127 19:11:17.297568 6870 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0127 19:11:17.297582 6870 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:11:17.297593 6870 handler.go:208] Removed *v1.Node event handler 2\\\\nI0127 19:11:17.297599 6870 factory.go:656] Stopping watch factory\\\\nI0127 19:11:17.297600 6870 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0127 19:11:17.297606 6870 handler.go:208] Removed *v1.Node event handler 7\\\\nI0127 19:11:17.297612 6870 ovnkube.go:599] Stopped ovnkube\\\\nI0127 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.439019 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.451647 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.464290 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.476977 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.489922 4776 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.499515 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.517132 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.517205 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.517217 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.517242 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.517256 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:18Z","lastTransitionTime":"2026-01-27T19:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.517986 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ 
start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://72b91a6cb2f327dae12b0d87d88388db9242e251fd8733de88723fc4d1de0375\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:50Z\\\",\\\"message\\\":\\\"50Z is after 2025-08-24T17:21:41Z]\\\\nI0127 19:10:50.283713 6466 services_controller.go:434] Service openshift-ingress-operator/metrics retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{metrics openshift-ingress-operator 06971ebf-e383-4ded-a48b-3b2e61be789e 4180 0 2025-02-23 05:12:34 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[name:ingress-operator] 
map[capability.openshift.io/name:Ingress include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:metrics-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc006917d27 \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:metrics,Protocol:TCP,Port:9393,TargetPort:{1 0 metrics},NodePort:0,AppProtocol:nil,},},Selector:map[string]string{name: ingress-operator,},ClusterIP:10.217.5.244,Type:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:49Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:11:17Z\\\",\\\"message\\\":\\\"t/informers/externalversions/factory.go:141\\\\nI0127 19:11:17.296687 6870 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0127 19:11:17.297459 6870 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0127 19:11:17.297483 6870 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0127 19:11:17.297508 6870 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0127 19:11:17.297514 6870 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0127 19:11:17.297524 6870 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0127 19:11:17.297558 6870 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0127 19:11:17.297568 6870 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0127 19:11:17.297582 6870 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:11:17.297593 6870 handler.go:208] Removed *v1.Node event handler 2\\\\nI0127 19:11:17.297599 6870 factory.go:656] Stopping watch factory\\\\nI0127 19:11:17.297600 6870 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0127 19:11:17.297606 6870 handler.go:208] Removed *v1.Node event handler 7\\\\nI0127 19:11:17.297612 6870 ovnkube.go:599] Stopped ovnkube\\\\nI0127 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:11:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.529130 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30ebe3a9-0400-4b77-a684-d42aeab83b81\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d09799e92f789db0fd7077d34d99557d1f48b29ef6913525d73446dfac5942a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a76216648fe603e922028f39eb9efb9ff18864054115c123f8bcea132c3db026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d8
8c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3f5c587739451d2571c4484c3fd6e635b5f584254d4b810339940c77bf5ac8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.540499 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.550812 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.560249 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60a9dedcb5b133aa5a67d6872908586c519e49937c1f38902edff10687f01974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:11:11Z\\\",\\\"message\\\":\\\"2026-01-27T19:10:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a8fc56b8-e7e3-484d-9160-1a91bfd6bab5\\\\n2026-01-27T19:10:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a8fc56b8-e7e3-484d-9160-1a91bfd6bab5 to /host/opt/cni/bin/\\\\n2026-01-27T19:10:26Z [verbose] multus-daemon started\\\\n2026-01-27T19:10:26Z [verbose] Readiness Indicator file check\\\\n2026-01-27T19:11:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.568477 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 
19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.578980 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.588062 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.596610 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.606892 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.621628 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.621759 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:18Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.621875 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.621911 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:18 crc kubenswrapper[4776]: 
I0127 19:11:18.621928 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.621937 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:18Z","lastTransitionTime":"2026-01-27T19:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.723755 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.723820 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.723838 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.723855 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.723867 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:18Z","lastTransitionTime":"2026-01-27T19:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.826607 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.826667 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.826683 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.826703 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.826722 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:18Z","lastTransitionTime":"2026-01-27T19:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.928723 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.928781 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.928795 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.928823 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:18 crc kubenswrapper[4776]: I0127 19:11:18.928832 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:18Z","lastTransitionTime":"2026-01-27T19:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.031845 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.031881 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.031891 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.031913 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.031931 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:19Z","lastTransitionTime":"2026-01-27T19:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.100149 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 03:52:37.080943454 +0000 UTC Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.134653 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.134714 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.134742 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.134795 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.134819 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:19Z","lastTransitionTime":"2026-01-27T19:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.184673 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovnkube-controller/3.log" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.187720 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.189694 4776 scope.go:117] "RemoveContainer" containerID="ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4" Jan 27 19:11:19 crc kubenswrapper[4776]: E0127 19:11:19.189914 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" podUID="0acf82d2-cb56-42c0-9762-895319b4c26c" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.203731 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30ebe3a9-0400-4b77-a684-d42aeab83b81\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9d09799e92f789db0fd7077d34d99557d1f48b29ef6913525d73446dfac5942a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a76216648fe603e922028f39eb9efb9ff18864054115c123f8bcea132c3db026\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3f5c587739451d2571c4484c3fd6e635b5f584254d4b810339940c77bf5ac8b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d4df3bfa11afa1734188c1986c5c17f1e7cc93f896027bf713416a455f28425f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.214860 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78796a7723b97d68c35740fe3ee2c925d8aa4b13c2f8ac076b2a3ce9be20ad4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 
2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.225686 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.236860 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.236949 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.236965 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.236980 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.236990 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:19Z","lastTransitionTime":"2026-01-27T19:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.238670 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-wmmtr" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"255485b5-2151-4328-b925-c2e1a1f95e1f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:11:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://60a9dedcb5b133aa5a67d6872908586c519e49937c1f38902edff10687f01974\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:11:11Z\\\",\\\"message\\\":\\\"2026-01-27T19:10:26+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_a8fc56b8-e7e3-484d-9160-1a91bfd6bab5\\\\n2026-01-27T19:10:26+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_a8fc56b8-e7e3-484d-9160-1a91bfd6bab5 to /host/opt/cni/bin/\\\\n2026-01-27T19:10:26Z [verbose] multus-daemon started\\\\n2026-01-27T19:10:26Z [verbose] Readiness Indicator file check\\\\n2026-01-27T19:11:11Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:11:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4wgnn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-wmmtr\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.252257 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5bea6ea5-d676-42cb-83ea-9a2c6cab54f1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-27T19:10:02Z\\\",\\\"message\\\":\\\"W0127 19:10:01.465802 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0127 19:10:01.466204 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769541001 cert, and key in /tmp/serving-cert-3728054693/serving-signer.crt, /tmp/serving-cert-3728054693/serving-signer.key\\\\nI0127 19:10:01.799294 1 observer_polling.go:159] Starting file observer\\\\nW0127 19:10:01.804512 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0127 19:10:01.804679 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0127 19:10:01.805363 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3728054693/tls.crt::/tmp/serving-cert-3728054693/tls.key\\\\\\\"\\\\nF0127 19:10:02.013709 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection 
refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:01Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:09:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.263702 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"56e30e32-cd28-4430-84c0-81b21e1e8312\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:00Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:09:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://85827535f8fd79fa4de321a4fc701e6044250d7df38efeb59e8d22ecfdfcf9a8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://887dc4e9d4dd37eff6faffaadfe9e28cffdd3758d681f19b8dce46ffdbc91f6d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:09:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d27bb3ef6fec892fed1dc4ed76d99fc907e7d892cfb77374bd6bea445a82317e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:09:58Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.275631 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:21Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9a79e17569d166f24cc5a809c3de639fa722ea9ffd889e9add208014b935474a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.288154 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.303175 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e047604-a155-4166-9ab0-0577cbe70bb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81df7157384e106aa2bee6dfc7534752fc069dcccfde8afafee5551f6ae0facb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://238e3eb7c111694f7d738863a9ebe53917f2cf417660225bccfa58a2052497b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bd815d69a39f326760599063d616232d594c386859d15f9effcd536ac14d3b94\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956df4e909acd059df2be440e382e64519b7e4fe65fe6696c17e8d4c31bca45e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ba408cf81519d9e34d4c54234d95ed47ec7510c851cc0ef0cfd14f12ba9f1881\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://70b6efd90e24c40b039fde9db915b7b1bd0c3db3fc4cb54a7773c191b5fb17ad\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ea020fa492b07e8fd96cddd24375b8aeeeaa10c622536195cbab08c437960bd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hmbpv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-hbtwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.313727 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"38523e9f-4493-4055-83b7-9e70ce801c8c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bc586932518131a894c960dadf4759232cd32881e6a13cd5acacf9aba121ad12\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3d107b459be06a059216390810a78043bc60d8929619218a8c51cb069eca93f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wwn9r\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wqrsg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 
19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.318815 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:19 crc kubenswrapper[4776]: E0127 19:11:19.318956 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.319201 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:19 crc kubenswrapper[4776]: E0127 19:11:19.319306 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.325186 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:20Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08c9ffa43eae1dcdbe187d5c7203a228ac2e92837497d87f38662b3484c10a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed7c8803454ed8e4a684a5f66b90cc7e2d7dad2ab55042d78b62ea21b536c90b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a
2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.335116 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xnmft" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6cc042e-9fd6-43f0-8655-72c53984b85e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f4be6addc3e1781c07724538e966a95f93ff80135363a2949e1408c88c5cb3cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gf4st\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xnmft\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.339023 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.339052 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.339061 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.339076 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.339085 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:19Z","lastTransitionTime":"2026-01-27T19:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.345626 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ebf810d-762c-445f-9705-a74d274fcf1b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b439deb38f6742935b5837e1ca618b579ea6f8469c62b1b5a2288fe4ec852464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri
-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-blcnp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-z2w9t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.355246 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28a63e45-994f-41fe-82fb-605486d13b98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:38Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gszlp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:38Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-t7tbg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.373982 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.384393 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qz4kb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fe3d3bb-f8f9-4b84-9b65-8d60cab75a2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c1e8134a6d01fb08f0e0535952f34febfb60a3efa8a007e76210bf18271659b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hgkwp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qz4kb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.400475 4776 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0acf82d2-cb56-42c0-9762-895319b4c26c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-27T19:10:23Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e8034ca3a4188cb8b7455c65f9efaf16ccba78e125f84a2dc793564d4d63b33b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0357ead021b8a6a5b8f224adeaf09ab73d7e5b313dabbda68243f08f4627257\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd7bc0019e4a9f8c95c2e306af9facc97567ae483d3edc2a4aa858c860c0b4e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d61e3f09b137e68b5d27d52fd3b46530ca80941f92904ff26d47a1eca69d10c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a867f3daba922d137ee116311ad520b946fd7415273a575108b4952b83034c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3ee6a3f0058e286d2b609f64d21ce023857281c5fd63e061446a9db84aaef834\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ 
MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad984c59a0ac63ac388241e01d18467f88393a414936e1cca19168f10df435aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-27T19:11:17Z\\\",\\\"message\\\":\\\"t/informers/externalversions/factory.go:141\\\\nI0127 19:11:17.296687 6870 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0127 19:11:17.297459 6870 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0127 19:11:17.297483 6870 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0127 19:11:17.297508 6870 
handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0127 19:11:17.297514 6870 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0127 19:11:17.297524 6870 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0127 19:11:17.297558 6870 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0127 19:11:17.297568 6870 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0127 19:11:17.297582 6870 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0127 19:11:17.297593 6870 handler.go:208] Removed *v1.Node event handler 2\\\\nI0127 19:11:17.297599 6870 factory.go:656] Stopping watch factory\\\\nI0127 19:11:17.297600 6870 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0127 19:11:17.297606 6870 handler.go:208] Removed *v1.Node event handler 7\\\\nI0127 19:11:17.297612 6870 ovnkube.go:599] Stopped ovnkube\\\\nI0127 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-27T19:11:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32ede35d45cc8c0eb5117437e59321418707f80419da8e778ae8959e5a447b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209948291
9d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-27T19:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://634fb52acf176e2399df1562db597a8c43c76d643f5f0be4885707ef33352155\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-27T19:10:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-27T19:10:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k7dhf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-27T19:10:23Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-d7d6j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-27T19:11:19Z is after 2025-08-24T17:21:41Z" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.441454 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.441513 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.441527 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.441544 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.441576 4776 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:19Z","lastTransitionTime":"2026-01-27T19:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.544148 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.544177 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.544186 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.544199 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.544207 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:19Z","lastTransitionTime":"2026-01-27T19:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.647009 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.647048 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.647060 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.647077 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.647088 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:19Z","lastTransitionTime":"2026-01-27T19:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.749687 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.749739 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.749755 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.749809 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.749827 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:19Z","lastTransitionTime":"2026-01-27T19:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.852105 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.852129 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.852140 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.852153 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.852164 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:19Z","lastTransitionTime":"2026-01-27T19:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.955174 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.955226 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.955244 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.955269 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:19 crc kubenswrapper[4776]: I0127 19:11:19.955286 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:19Z","lastTransitionTime":"2026-01-27T19:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.057594 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.057630 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.057642 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.057659 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.057671 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:20Z","lastTransitionTime":"2026-01-27T19:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.100617 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 18:39:47.741257172 +0000 UTC Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.160478 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.160520 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.160530 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.160546 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.160557 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:20Z","lastTransitionTime":"2026-01-27T19:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.263808 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.263852 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.263863 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.263880 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.263895 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:20Z","lastTransitionTime":"2026-01-27T19:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.318961 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.319034 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:20 crc kubenswrapper[4776]: E0127 19:11:20.319212 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:20 crc kubenswrapper[4776]: E0127 19:11:20.319357 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.367324 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.367371 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.367381 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.367421 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.367438 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:20Z","lastTransitionTime":"2026-01-27T19:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.470246 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.470285 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.470297 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.470314 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.470326 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:20Z","lastTransitionTime":"2026-01-27T19:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.572537 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.572604 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.572626 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.572667 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.572706 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:20Z","lastTransitionTime":"2026-01-27T19:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.675263 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.675306 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.675318 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.675334 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.675345 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:20Z","lastTransitionTime":"2026-01-27T19:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.778476 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.778539 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.778561 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.778589 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.778612 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:20Z","lastTransitionTime":"2026-01-27T19:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.881343 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.881419 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.881446 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.881484 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.881511 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:20Z","lastTransitionTime":"2026-01-27T19:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.984348 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.984387 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.984395 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.984409 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:20 crc kubenswrapper[4776]: I0127 19:11:20.984418 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:20Z","lastTransitionTime":"2026-01-27T19:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.086831 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.086877 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.086891 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.086908 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.086919 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:21Z","lastTransitionTime":"2026-01-27T19:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.101019 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 19:17:54.775195863 +0000 UTC Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.190210 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.190253 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.190269 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.190292 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.190308 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:21Z","lastTransitionTime":"2026-01-27T19:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.292645 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.292704 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.292721 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.292744 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.292763 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:21Z","lastTransitionTime":"2026-01-27T19:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.319198 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.319288 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:21 crc kubenswrapper[4776]: E0127 19:11:21.319363 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:21 crc kubenswrapper[4776]: E0127 19:11:21.319463 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.395895 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.396052 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.396076 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.396109 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.396130 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:21Z","lastTransitionTime":"2026-01-27T19:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.499136 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.499189 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.499203 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.499224 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.499238 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:21Z","lastTransitionTime":"2026-01-27T19:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.602406 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.602437 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.602462 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.602475 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.602483 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:21Z","lastTransitionTime":"2026-01-27T19:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.705568 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.705716 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.705735 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.705762 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.705815 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:21Z","lastTransitionTime":"2026-01-27T19:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.808620 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.808900 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.808944 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.809024 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.809048 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:21Z","lastTransitionTime":"2026-01-27T19:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.911464 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.911503 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.911511 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.911523 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.911531 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:21Z","lastTransitionTime":"2026-01-27T19:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:21 crc kubenswrapper[4776]: I0127 19:11:21.950214 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:11:21 crc kubenswrapper[4776]: E0127 19:11:21.950676 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.950640593 +0000 UTC m=+148.456245770 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.014191 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.014264 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.014349 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.014388 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.014410 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:22Z","lastTransitionTime":"2026-01-27T19:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.051753 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.051874 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.051907 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.051938 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.052068 4776 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.052093 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.052104 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.052120 4776 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.052146 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.052177 4776 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.052125 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.052108586 +0000 UTC m=+148.557713733 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.052257 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.052231 +0000 UTC m=+148.557836187 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.052280 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.052268631 +0000 UTC m=+148.557873818 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.052137 4776 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.052301 4776 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.052341 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.052330433 +0000 UTC m=+148.557935620 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.102010 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 22:36:26.688438931 +0000 UTC Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.120115 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.120193 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.120205 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.120221 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.120233 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:22Z","lastTransitionTime":"2026-01-27T19:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.223085 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.223177 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.223204 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.223236 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.223257 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:22Z","lastTransitionTime":"2026-01-27T19:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.319578 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.319629 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.319936 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:22 crc kubenswrapper[4776]: E0127 19:11:22.320117 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.326075 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.326130 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.326149 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.326172 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.326189 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:22Z","lastTransitionTime":"2026-01-27T19:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.428409 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.428449 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.428457 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.428471 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.428480 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:22Z","lastTransitionTime":"2026-01-27T19:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.531727 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.531796 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.531813 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.531832 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.531847 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:22Z","lastTransitionTime":"2026-01-27T19:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.634261 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.634323 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.634341 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.634368 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.634387 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:22Z","lastTransitionTime":"2026-01-27T19:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.736650 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.736719 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.736743 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.736802 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.736821 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:22Z","lastTransitionTime":"2026-01-27T19:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.840107 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.840171 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.840189 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.840214 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.840234 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:22Z","lastTransitionTime":"2026-01-27T19:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.943284 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.943529 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.943554 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.943588 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:22 crc kubenswrapper[4776]: I0127 19:11:22.943610 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:22Z","lastTransitionTime":"2026-01-27T19:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.046586 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.046730 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.046757 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.046838 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.046875 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:23Z","lastTransitionTime":"2026-01-27T19:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.102952 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 18:24:44.626137081 +0000 UTC Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.149197 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.149298 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.149321 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.149346 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.149362 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:23Z","lastTransitionTime":"2026-01-27T19:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.252061 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.252097 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.252106 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.252120 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.252129 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:23Z","lastTransitionTime":"2026-01-27T19:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.319258 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.319339 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:23 crc kubenswrapper[4776]: E0127 19:11:23.319833 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:23 crc kubenswrapper[4776]: E0127 19:11:23.319931 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.332115 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.355520 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.355559 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.355568 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.355587 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.355599 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:23Z","lastTransitionTime":"2026-01-27T19:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.458590 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.458648 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.458659 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.458680 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.458694 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:23Z","lastTransitionTime":"2026-01-27T19:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.560382 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.560417 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.560425 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.560438 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.560447 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:23Z","lastTransitionTime":"2026-01-27T19:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.662714 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.662825 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.662837 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.662854 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.662866 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:23Z","lastTransitionTime":"2026-01-27T19:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.765230 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.765270 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.765282 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.765299 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.765311 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:23Z","lastTransitionTime":"2026-01-27T19:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.867832 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.867872 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.867884 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.867899 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.867910 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:23Z","lastTransitionTime":"2026-01-27T19:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.970083 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.970158 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.970234 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.970315 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:23 crc kubenswrapper[4776]: I0127 19:11:23.970343 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:23Z","lastTransitionTime":"2026-01-27T19:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.073103 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.073155 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.073165 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.073179 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.073190 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:24Z","lastTransitionTime":"2026-01-27T19:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.103875 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 09:27:50.966499793 +0000 UTC Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.175478 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.175524 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.175532 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.175549 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.175563 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:24Z","lastTransitionTime":"2026-01-27T19:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.278040 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.278349 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.278363 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.278378 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.278388 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:24Z","lastTransitionTime":"2026-01-27T19:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.319066 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.319117 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:24 crc kubenswrapper[4776]: E0127 19:11:24.319221 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:24 crc kubenswrapper[4776]: E0127 19:11:24.319295 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.381616 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.381680 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.381693 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.381716 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.381737 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:24Z","lastTransitionTime":"2026-01-27T19:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.484873 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.484954 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.484976 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.485004 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.485030 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:24Z","lastTransitionTime":"2026-01-27T19:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.588885 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.588983 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.589011 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.589040 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.589062 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:24Z","lastTransitionTime":"2026-01-27T19:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.691336 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.691396 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.691414 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.691438 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.691459 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:24Z","lastTransitionTime":"2026-01-27T19:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.794002 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.794061 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.794077 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.794096 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.794111 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:24Z","lastTransitionTime":"2026-01-27T19:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.896484 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.896522 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.896534 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.896552 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.896565 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:24Z","lastTransitionTime":"2026-01-27T19:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.998715 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.998755 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.998780 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.998796 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:24 crc kubenswrapper[4776]: I0127 19:11:24.998806 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:24Z","lastTransitionTime":"2026-01-27T19:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.101116 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.101158 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.101169 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.101186 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.101199 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:25Z","lastTransitionTime":"2026-01-27T19:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.104375 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 02:53:35.877283767 +0000 UTC Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.203912 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.203963 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.203975 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.203989 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.203999 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:25Z","lastTransitionTime":"2026-01-27T19:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.318699 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.318794 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:25 crc kubenswrapper[4776]: E0127 19:11:25.318878 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:25 crc kubenswrapper[4776]: E0127 19:11:25.319033 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.320225 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.320261 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.320274 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.320292 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.320304 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:25Z","lastTransitionTime":"2026-01-27T19:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.423554 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.423619 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.423644 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.423677 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.423701 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:25Z","lastTransitionTime":"2026-01-27T19:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.527101 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.527148 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.527158 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.527175 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.527187 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:25Z","lastTransitionTime":"2026-01-27T19:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.630402 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.630459 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.630480 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.630505 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.630524 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:25Z","lastTransitionTime":"2026-01-27T19:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.732909 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.732990 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.733011 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.733034 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.733051 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:25Z","lastTransitionTime":"2026-01-27T19:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.836076 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.836110 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.836121 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.836136 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.836147 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:25Z","lastTransitionTime":"2026-01-27T19:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.939324 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.939876 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.940105 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.940304 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:25 crc kubenswrapper[4776]: I0127 19:11:25.940484 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:25Z","lastTransitionTime":"2026-01-27T19:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.043428 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.043496 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.043520 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.043552 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.043573 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:26Z","lastTransitionTime":"2026-01-27T19:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.105171 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 18:36:54.129977222 +0000 UTC Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.146339 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.146375 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.146387 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.146405 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.146416 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:26Z","lastTransitionTime":"2026-01-27T19:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.249384 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.249450 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.249472 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.249496 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.249515 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:26Z","lastTransitionTime":"2026-01-27T19:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.318711 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.318732 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:26 crc kubenswrapper[4776]: E0127 19:11:26.318958 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:26 crc kubenswrapper[4776]: E0127 19:11:26.319067 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.352501 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.352533 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.352545 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.352561 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.352572 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:26Z","lastTransitionTime":"2026-01-27T19:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.455720 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.455816 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.455842 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.455869 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.455887 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:26Z","lastTransitionTime":"2026-01-27T19:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.557759 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.557828 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.557842 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.557862 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.557875 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:26Z","lastTransitionTime":"2026-01-27T19:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.660925 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.660996 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.661019 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.661046 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.661067 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:26Z","lastTransitionTime":"2026-01-27T19:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.764247 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.764303 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.764317 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.764337 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.764353 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:26Z","lastTransitionTime":"2026-01-27T19:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.867578 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.867648 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.867664 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.867689 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.867708 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:26Z","lastTransitionTime":"2026-01-27T19:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.970356 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.970398 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.970408 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.970424 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:26 crc kubenswrapper[4776]: I0127 19:11:26.970437 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:26Z","lastTransitionTime":"2026-01-27T19:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.009642 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.009719 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.009732 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.009754 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.009787 4776 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-27T19:11:27Z","lastTransitionTime":"2026-01-27T19:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.064090 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp"] Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.064610 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.067228 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.067263 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.067309 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.068123 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.105371 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 18:55:43.561741048 +0000 UTC Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.105410 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.111734 4776 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.135623 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-qz4kb" podStartSLOduration=65.135600496 podStartE2EDuration="1m5.135600496s" podCreationTimestamp="2026-01-27 19:10:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:11:27.102194501 +0000 UTC m=+89.607799648" watchObservedRunningTime="2026-01-27 19:11:27.135600496 +0000 UTC m=+89.641205643" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.186941 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=38.18692585 podStartE2EDuration="38.18692585s" podCreationTimestamp="2026-01-27 19:10:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:11:27.164976014 +0000 UTC m=+89.670581161" watchObservedRunningTime="2026-01-27 19:11:27.18692585 +0000 UTC m=+89.692530997" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.207383 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/58cb6037-7875-4011-8af5-c6d1cd56141f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.207419 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/58cb6037-7875-4011-8af5-c6d1cd56141f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.207438 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/58cb6037-7875-4011-8af5-c6d1cd56141f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.207550 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58cb6037-7875-4011-8af5-c6d1cd56141f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.207581 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/58cb6037-7875-4011-8af5-c6d1cd56141f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.219039 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-wmmtr" podStartSLOduration=64.219020524 podStartE2EDuration="1m4.219020524s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:11:27.218759326 +0000 UTC m=+89.724364483" watchObservedRunningTime="2026-01-27 19:11:27.219020524 +0000 UTC m=+89.724625691" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.231528 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wqrsg" podStartSLOduration=64.231505055 podStartE2EDuration="1m4.231505055s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:11:27.231403092 +0000 UTC m=+89.737008249" watchObservedRunningTime="2026-01-27 19:11:27.231505055 +0000 UTC m=+89.737110242" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.264103 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=70.264078493 podStartE2EDuration="1m10.264078493s" podCreationTimestamp="2026-01-27 19:10:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:11:27.245953036 +0000 UTC m=+89.751558243" watchObservedRunningTime="2026-01-27 19:11:27.264078493 +0000 UTC m=+89.769683680" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.274451 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=69.274432927 podStartE2EDuration="1m9.274432927s" podCreationTimestamp="2026-01-27 19:10:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:11:27.263137334 +0000 UTC m=+89.768742501" watchObservedRunningTime="2026-01-27 19:11:27.274432927 +0000 UTC m=+89.780038074" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.304197 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-hbtwl" podStartSLOduration=64.304178218 podStartE2EDuration="1m4.304178218s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:11:27.304034813 +0000 UTC m=+89.809640030" watchObservedRunningTime="2026-01-27 19:11:27.304178218 +0000 UTC m=+89.809783365" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.308993 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/58cb6037-7875-4011-8af5-c6d1cd56141f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.309273 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/58cb6037-7875-4011-8af5-c6d1cd56141f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.309462 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/58cb6037-7875-4011-8af5-c6d1cd56141f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.309120 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/58cb6037-7875-4011-8af5-c6d1cd56141f-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.309691 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58cb6037-7875-4011-8af5-c6d1cd56141f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.309814 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/58cb6037-7875-4011-8af5-c6d1cd56141f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: 
\"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.309886 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/58cb6037-7875-4011-8af5-c6d1cd56141f-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.310368 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/58cb6037-7875-4011-8af5-c6d1cd56141f-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.318918 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:27 crc kubenswrapper[4776]: E0127 19:11:27.319437 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.319514 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58cb6037-7875-4011-8af5-c6d1cd56141f-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.319556 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:27 crc kubenswrapper[4776]: E0127 19:11:27.320374 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.331020 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=4.331004166 podStartE2EDuration="4.331004166s" podCreationTimestamp="2026-01-27 19:11:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:11:27.330682376 +0000 UTC m=+89.836287533" watchObservedRunningTime="2026-01-27 19:11:27.331004166 +0000 UTC m=+89.836609323" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.332926 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/58cb6037-7875-4011-8af5-c6d1cd56141f-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hk6xp\" (UID: \"58cb6037-7875-4011-8af5-c6d1cd56141f\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.361747 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-xnmft" podStartSLOduration=65.361719457 podStartE2EDuration="1m5.361719457s" podCreationTimestamp="2026-01-27 19:10:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:11:27.361534821 +0000 UTC m=+89.867139978" watchObservedRunningTime="2026-01-27 19:11:27.361719457 +0000 UTC m=+89.867324664" Jan 27 19:11:27 crc kubenswrapper[4776]: I0127 19:11:27.382867 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" Jan 27 19:11:27 crc kubenswrapper[4776]: W0127 19:11:27.401716 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58cb6037_7875_4011_8af5_c6d1cd56141f.slice/crio-f0d60e98e23a74126e1630945eec05f636ecc5af0c6ab0691ecf673f9cd8f19b WatchSource:0}: Error finding container f0d60e98e23a74126e1630945eec05f636ecc5af0c6ab0691ecf673f9cd8f19b: Status 404 returned error can't find the container with id f0d60e98e23a74126e1630945eec05f636ecc5af0c6ab0691ecf673f9cd8f19b Jan 27 19:11:28 crc kubenswrapper[4776]: I0127 19:11:28.221867 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" event={"ID":"58cb6037-7875-4011-8af5-c6d1cd56141f","Type":"ContainerStarted","Data":"b668abea46df4fe11a9adb6d6e1ba5064d77cd8c484f48c01d99f2509e4366f6"} Jan 27 19:11:28 crc kubenswrapper[4776]: I0127 19:11:28.221931 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" event={"ID":"58cb6037-7875-4011-8af5-c6d1cd56141f","Type":"ContainerStarted","Data":"f0d60e98e23a74126e1630945eec05f636ecc5af0c6ab0691ecf673f9cd8f19b"} Jan 27 19:11:28 crc kubenswrapper[4776]: I0127 19:11:28.239584 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hk6xp" podStartSLOduration=65.239557666 podStartE2EDuration="1m5.239557666s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:11:28.239258677 +0000 UTC m=+90.744863914" watchObservedRunningTime="2026-01-27 19:11:28.239557666 +0000 UTC m=+90.745162833" Jan 27 19:11:28 crc kubenswrapper[4776]: I0127 19:11:28.240010 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" podStartSLOduration=65.24000118 podStartE2EDuration="1m5.24000118s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:11:27.372947788 +0000 UTC m=+89.878552975" watchObservedRunningTime="2026-01-27 19:11:28.24000118 +0000 UTC m=+90.745606337" Jan 27 19:11:28 crc kubenswrapper[4776]: I0127 19:11:28.318941 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:28 crc kubenswrapper[4776]: E0127 19:11:28.320248 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:28 crc kubenswrapper[4776]: I0127 19:11:28.320399 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:28 crc kubenswrapper[4776]: E0127 19:11:28.320549 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:29 crc kubenswrapper[4776]: I0127 19:11:29.319456 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:29 crc kubenswrapper[4776]: I0127 19:11:29.319456 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:29 crc kubenswrapper[4776]: E0127 19:11:29.320107 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:29 crc kubenswrapper[4776]: E0127 19:11:29.320337 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:30 crc kubenswrapper[4776]: I0127 19:11:30.319077 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:30 crc kubenswrapper[4776]: I0127 19:11:30.319077 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:30 crc kubenswrapper[4776]: E0127 19:11:30.319229 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:30 crc kubenswrapper[4776]: E0127 19:11:30.319367 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:31 crc kubenswrapper[4776]: I0127 19:11:31.318898 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:31 crc kubenswrapper[4776]: E0127 19:11:31.319045 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:31 crc kubenswrapper[4776]: I0127 19:11:31.318894 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:31 crc kubenswrapper[4776]: E0127 19:11:31.319267 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:32 crc kubenswrapper[4776]: I0127 19:11:32.319372 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:32 crc kubenswrapper[4776]: I0127 19:11:32.319420 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:32 crc kubenswrapper[4776]: E0127 19:11:32.319549 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:32 crc kubenswrapper[4776]: E0127 19:11:32.319684 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:33 crc kubenswrapper[4776]: I0127 19:11:33.318995 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:33 crc kubenswrapper[4776]: E0127 19:11:33.319127 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:33 crc kubenswrapper[4776]: I0127 19:11:33.319202 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:33 crc kubenswrapper[4776]: E0127 19:11:33.319486 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:33 crc kubenswrapper[4776]: I0127 19:11:33.319714 4776 scope.go:117] "RemoveContainer" containerID="ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4" Jan 27 19:11:33 crc kubenswrapper[4776]: E0127 19:11:33.319929 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" podUID="0acf82d2-cb56-42c0-9762-895319b4c26c" Jan 27 19:11:34 crc kubenswrapper[4776]: I0127 19:11:34.319001 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:34 crc kubenswrapper[4776]: E0127 19:11:34.319212 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:34 crc kubenswrapper[4776]: I0127 19:11:34.319873 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:34 crc kubenswrapper[4776]: E0127 19:11:34.320027 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:35 crc kubenswrapper[4776]: I0127 19:11:35.319033 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:35 crc kubenswrapper[4776]: I0127 19:11:35.319127 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:35 crc kubenswrapper[4776]: E0127 19:11:35.319524 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:35 crc kubenswrapper[4776]: E0127 19:11:35.319706 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:36 crc kubenswrapper[4776]: I0127 19:11:36.319483 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:36 crc kubenswrapper[4776]: I0127 19:11:36.319500 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:36 crc kubenswrapper[4776]: E0127 19:11:36.319946 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:36 crc kubenswrapper[4776]: E0127 19:11:36.319737 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:37 crc kubenswrapper[4776]: I0127 19:11:37.318910 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:37 crc kubenswrapper[4776]: I0127 19:11:37.318910 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:37 crc kubenswrapper[4776]: E0127 19:11:37.319141 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:37 crc kubenswrapper[4776]: E0127 19:11:37.319315 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:38 crc kubenswrapper[4776]: I0127 19:11:38.318805 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:38 crc kubenswrapper[4776]: I0127 19:11:38.321037 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:38 crc kubenswrapper[4776]: E0127 19:11:38.321152 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:38 crc kubenswrapper[4776]: E0127 19:11:38.321381 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:39 crc kubenswrapper[4776]: I0127 19:11:39.319256 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:39 crc kubenswrapper[4776]: I0127 19:11:39.319266 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:39 crc kubenswrapper[4776]: E0127 19:11:39.319468 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:39 crc kubenswrapper[4776]: E0127 19:11:39.319615 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:40 crc kubenswrapper[4776]: I0127 19:11:40.319583 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:40 crc kubenswrapper[4776]: E0127 19:11:40.319705 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:40 crc kubenswrapper[4776]: I0127 19:11:40.320068 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:40 crc kubenswrapper[4776]: E0127 19:11:40.320176 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:41 crc kubenswrapper[4776]: I0127 19:11:41.318906 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:41 crc kubenswrapper[4776]: I0127 19:11:41.318989 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:41 crc kubenswrapper[4776]: E0127 19:11:41.319038 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:41 crc kubenswrapper[4776]: E0127 19:11:41.319207 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:41 crc kubenswrapper[4776]: I0127 19:11:41.969066 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs\") pod \"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:41 crc kubenswrapper[4776]: E0127 19:11:41.969312 4776 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:11:41 crc kubenswrapper[4776]: E0127 19:11:41.969437 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs podName:28a63e45-994f-41fe-82fb-605486d13b98 nodeName:}" failed. No retries permitted until 2026-01-27 19:12:45.96940502 +0000 UTC m=+168.475010207 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs") pod "network-metrics-daemon-t7tbg" (UID: "28a63e45-994f-41fe-82fb-605486d13b98") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 27 19:11:42 crc kubenswrapper[4776]: I0127 19:11:42.319632 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:42 crc kubenswrapper[4776]: I0127 19:11:42.319812 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:42 crc kubenswrapper[4776]: E0127 19:11:42.319868 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:42 crc kubenswrapper[4776]: E0127 19:11:42.319999 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:43 crc kubenswrapper[4776]: I0127 19:11:43.319505 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:43 crc kubenswrapper[4776]: I0127 19:11:43.319614 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:43 crc kubenswrapper[4776]: E0127 19:11:43.319699 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:43 crc kubenswrapper[4776]: E0127 19:11:43.319838 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:44 crc kubenswrapper[4776]: I0127 19:11:44.318924 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:44 crc kubenswrapper[4776]: I0127 19:11:44.319011 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:44 crc kubenswrapper[4776]: E0127 19:11:44.319124 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:44 crc kubenswrapper[4776]: E0127 19:11:44.319403 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:44 crc kubenswrapper[4776]: I0127 19:11:44.338188 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 27 19:11:45 crc kubenswrapper[4776]: I0127 19:11:45.318832 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:45 crc kubenswrapper[4776]: I0127 19:11:45.318840 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:45 crc kubenswrapper[4776]: E0127 19:11:45.319164 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:45 crc kubenswrapper[4776]: E0127 19:11:45.319429 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:46 crc kubenswrapper[4776]: I0127 19:11:46.319107 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:46 crc kubenswrapper[4776]: I0127 19:11:46.319131 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:46 crc kubenswrapper[4776]: E0127 19:11:46.319319 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:46 crc kubenswrapper[4776]: E0127 19:11:46.319450 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:47 crc kubenswrapper[4776]: I0127 19:11:47.319214 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:47 crc kubenswrapper[4776]: I0127 19:11:47.319309 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:47 crc kubenswrapper[4776]: E0127 19:11:47.319603 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:47 crc kubenswrapper[4776]: E0127 19:11:47.319807 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:48 crc kubenswrapper[4776]: I0127 19:11:48.319041 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:48 crc kubenswrapper[4776]: I0127 19:11:48.320696 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:48 crc kubenswrapper[4776]: E0127 19:11:48.320663 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:48 crc kubenswrapper[4776]: E0127 19:11:48.321355 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:48 crc kubenswrapper[4776]: I0127 19:11:48.322092 4776 scope.go:117] "RemoveContainer" containerID="ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4" Jan 27 19:11:48 crc kubenswrapper[4776]: E0127 19:11:48.322473 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-d7d6j_openshift-ovn-kubernetes(0acf82d2-cb56-42c0-9762-895319b4c26c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" podUID="0acf82d2-cb56-42c0-9762-895319b4c26c" Jan 27 19:11:48 crc kubenswrapper[4776]: I0127 19:11:48.359230 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=4.359206862 podStartE2EDuration="4.359206862s" podCreationTimestamp="2026-01-27 19:11:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:11:48.358305943 +0000 UTC m=+110.863911140" watchObservedRunningTime="2026-01-27 19:11:48.359206862 +0000 UTC m=+110.864812029" Jan 27 19:11:49 crc kubenswrapper[4776]: I0127 19:11:49.318563 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:49 crc kubenswrapper[4776]: E0127 19:11:49.318974 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:49 crc kubenswrapper[4776]: I0127 19:11:49.318663 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:49 crc kubenswrapper[4776]: E0127 19:11:49.319183 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:50 crc kubenswrapper[4776]: I0127 19:11:50.319600 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:50 crc kubenswrapper[4776]: I0127 19:11:50.319600 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:50 crc kubenswrapper[4776]: E0127 19:11:50.319783 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:50 crc kubenswrapper[4776]: E0127 19:11:50.319838 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:51 crc kubenswrapper[4776]: I0127 19:11:51.319001 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:51 crc kubenswrapper[4776]: I0127 19:11:51.319018 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:51 crc kubenswrapper[4776]: E0127 19:11:51.319265 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:51 crc kubenswrapper[4776]: E0127 19:11:51.319450 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:52 crc kubenswrapper[4776]: I0127 19:11:52.319042 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:52 crc kubenswrapper[4776]: I0127 19:11:52.319204 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:52 crc kubenswrapper[4776]: E0127 19:11:52.319386 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:52 crc kubenswrapper[4776]: E0127 19:11:52.319574 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:53 crc kubenswrapper[4776]: I0127 19:11:53.319034 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:53 crc kubenswrapper[4776]: I0127 19:11:53.318965 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:53 crc kubenswrapper[4776]: E0127 19:11:53.319254 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:53 crc kubenswrapper[4776]: E0127 19:11:53.319389 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:54 crc kubenswrapper[4776]: I0127 19:11:54.319346 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:54 crc kubenswrapper[4776]: I0127 19:11:54.319478 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:54 crc kubenswrapper[4776]: E0127 19:11:54.319606 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:54 crc kubenswrapper[4776]: E0127 19:11:54.319755 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:55 crc kubenswrapper[4776]: I0127 19:11:55.319442 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:55 crc kubenswrapper[4776]: I0127 19:11:55.319520 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:55 crc kubenswrapper[4776]: E0127 19:11:55.319566 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:55 crc kubenswrapper[4776]: E0127 19:11:55.319649 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:56 crc kubenswrapper[4776]: I0127 19:11:56.318694 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:56 crc kubenswrapper[4776]: I0127 19:11:56.318696 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:56 crc kubenswrapper[4776]: E0127 19:11:56.318856 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:56 crc kubenswrapper[4776]: E0127 19:11:56.318988 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:57 crc kubenswrapper[4776]: I0127 19:11:57.318692 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:57 crc kubenswrapper[4776]: I0127 19:11:57.318718 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:57 crc kubenswrapper[4776]: E0127 19:11:57.318997 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:57 crc kubenswrapper[4776]: E0127 19:11:57.319349 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:58 crc kubenswrapper[4776]: E0127 19:11:58.247260 4776 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 27 19:11:58 crc kubenswrapper[4776]: I0127 19:11:58.319510 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:11:58 crc kubenswrapper[4776]: I0127 19:11:58.319515 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:11:58 crc kubenswrapper[4776]: E0127 19:11:58.322141 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:11:58 crc kubenswrapper[4776]: E0127 19:11:58.322237 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:11:58 crc kubenswrapper[4776]: I0127 19:11:58.327348 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wmmtr_255485b5-2151-4328-b925-c2e1a1f95e1f/kube-multus/1.log" Jan 27 19:11:58 crc kubenswrapper[4776]: I0127 19:11:58.328037 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wmmtr_255485b5-2151-4328-b925-c2e1a1f95e1f/kube-multus/0.log" Jan 27 19:11:58 crc kubenswrapper[4776]: I0127 19:11:58.328122 4776 generic.go:334] "Generic (PLEG): container finished" podID="255485b5-2151-4328-b925-c2e1a1f95e1f" containerID="60a9dedcb5b133aa5a67d6872908586c519e49937c1f38902edff10687f01974" exitCode=1 Jan 27 19:11:58 crc kubenswrapper[4776]: I0127 19:11:58.328167 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wmmtr" event={"ID":"255485b5-2151-4328-b925-c2e1a1f95e1f","Type":"ContainerDied","Data":"60a9dedcb5b133aa5a67d6872908586c519e49937c1f38902edff10687f01974"} Jan 27 19:11:58 crc kubenswrapper[4776]: I0127 19:11:58.328228 4776 scope.go:117] "RemoveContainer" containerID="da8f01bc41da82a461b01fa4302e5e31adbef4f5ea69baeeceec08f1c4e986ad" Jan 27 19:11:58 crc kubenswrapper[4776]: I0127 19:11:58.328635 4776 scope.go:117] "RemoveContainer" containerID="60a9dedcb5b133aa5a67d6872908586c519e49937c1f38902edff10687f01974" Jan 27 19:11:58 crc kubenswrapper[4776]: E0127 19:11:58.328887 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-wmmtr_openshift-multus(255485b5-2151-4328-b925-c2e1a1f95e1f)\"" pod="openshift-multus/multus-wmmtr" podUID="255485b5-2151-4328-b925-c2e1a1f95e1f" Jan 27 19:11:58 crc kubenswrapper[4776]: E0127 19:11:58.387666 4776 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 19:11:59 crc kubenswrapper[4776]: I0127 19:11:59.318699 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:11:59 crc kubenswrapper[4776]: I0127 19:11:59.318830 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:11:59 crc kubenswrapper[4776]: E0127 19:11:59.318950 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:11:59 crc kubenswrapper[4776]: E0127 19:11:59.319121 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:11:59 crc kubenswrapper[4776]: I0127 19:11:59.334906 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wmmtr_255485b5-2151-4328-b925-c2e1a1f95e1f/kube-multus/1.log" Jan 27 19:12:00 crc kubenswrapper[4776]: I0127 19:12:00.319004 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:12:00 crc kubenswrapper[4776]: I0127 19:12:00.319127 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:12:00 crc kubenswrapper[4776]: E0127 19:12:00.319661 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:12:00 crc kubenswrapper[4776]: E0127 19:12:00.319901 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:12:00 crc kubenswrapper[4776]: I0127 19:12:00.320250 4776 scope.go:117] "RemoveContainer" containerID="ae426f0b76dd79817e745d0e71a2e9c2c1c93ab8f13ebcb306a1b76dc04985e4" Jan 27 19:12:01 crc kubenswrapper[4776]: I0127 19:12:01.193505 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-t7tbg"] Jan 27 19:12:01 crc kubenswrapper[4776]: I0127 19:12:01.193947 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:12:01 crc kubenswrapper[4776]: E0127 19:12:01.194089 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:12:01 crc kubenswrapper[4776]: I0127 19:12:01.318988 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:12:01 crc kubenswrapper[4776]: E0127 19:12:01.319188 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:12:01 crc kubenswrapper[4776]: I0127 19:12:01.345348 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovnkube-controller/3.log" Jan 27 19:12:01 crc kubenswrapper[4776]: I0127 19:12:01.347733 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-d7d6j_0acf82d2-cb56-42c0-9762-895319b4c26c/ovn-acl-logging/0.log" Jan 27 19:12:01 crc kubenswrapper[4776]: I0127 19:12:01.348146 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" event={"ID":"0acf82d2-cb56-42c0-9762-895319b4c26c","Type":"ContainerStarted","Data":"68a932438cb93a530fcb169aef201caf1e231a96028396d820593349f5eeceb9"} Jan 27 19:12:01 crc kubenswrapper[4776]: I0127 19:12:01.349179 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:12:01 crc kubenswrapper[4776]: I0127 19:12:01.385435 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" podStartSLOduration=98.385421152 podStartE2EDuration="1m38.385421152s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:01.382929235 +0000 UTC m=+123.888534382" watchObservedRunningTime="2026-01-27 19:12:01.385421152 +0000 UTC m=+123.891026299" Jan 27 19:12:02 crc kubenswrapper[4776]: I0127 19:12:02.318995 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:12:02 crc kubenswrapper[4776]: E0127 19:12:02.319159 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:12:02 crc kubenswrapper[4776]: I0127 19:12:02.319241 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:12:02 crc kubenswrapper[4776]: I0127 19:12:02.319320 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:12:02 crc kubenswrapper[4776]: E0127 19:12:02.320348 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:12:02 crc kubenswrapper[4776]: E0127 19:12:02.320647 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:12:03 crc kubenswrapper[4776]: I0127 19:12:03.318674 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:12:03 crc kubenswrapper[4776]: E0127 19:12:03.319018 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:12:03 crc kubenswrapper[4776]: E0127 19:12:03.389452 4776 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 19:12:04 crc kubenswrapper[4776]: I0127 19:12:04.319506 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:12:04 crc kubenswrapper[4776]: I0127 19:12:04.319589 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:12:04 crc kubenswrapper[4776]: E0127 19:12:04.319644 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:12:04 crc kubenswrapper[4776]: E0127 19:12:04.319721 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:12:04 crc kubenswrapper[4776]: I0127 19:12:04.319838 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:12:04 crc kubenswrapper[4776]: E0127 19:12:04.320027 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:12:05 crc kubenswrapper[4776]: I0127 19:12:05.319110 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:12:05 crc kubenswrapper[4776]: E0127 19:12:05.319326 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:12:06 crc kubenswrapper[4776]: I0127 19:12:06.318946 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:12:06 crc kubenswrapper[4776]: I0127 19:12:06.318955 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:12:06 crc kubenswrapper[4776]: I0127 19:12:06.319169 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:12:06 crc kubenswrapper[4776]: E0127 19:12:06.319130 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:12:06 crc kubenswrapper[4776]: E0127 19:12:06.319305 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:12:06 crc kubenswrapper[4776]: E0127 19:12:06.319411 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:12:07 crc kubenswrapper[4776]: I0127 19:12:07.319564 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:12:07 crc kubenswrapper[4776]: E0127 19:12:07.320083 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:12:08 crc kubenswrapper[4776]: I0127 19:12:08.319474 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:12:08 crc kubenswrapper[4776]: I0127 19:12:08.319512 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:12:08 crc kubenswrapper[4776]: I0127 19:12:08.319509 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:12:08 crc kubenswrapper[4776]: E0127 19:12:08.320948 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:12:08 crc kubenswrapper[4776]: E0127 19:12:08.321044 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:12:08 crc kubenswrapper[4776]: E0127 19:12:08.321146 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:12:08 crc kubenswrapper[4776]: E0127 19:12:08.390013 4776 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 27 19:12:09 crc kubenswrapper[4776]: I0127 19:12:09.318593 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:12:09 crc kubenswrapper[4776]: E0127 19:12:09.319077 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:12:09 crc kubenswrapper[4776]: I0127 19:12:09.319311 4776 scope.go:117] "RemoveContainer" containerID="60a9dedcb5b133aa5a67d6872908586c519e49937c1f38902edff10687f01974" Jan 27 19:12:10 crc kubenswrapper[4776]: I0127 19:12:10.319243 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:12:10 crc kubenswrapper[4776]: I0127 19:12:10.319316 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:12:10 crc kubenswrapper[4776]: E0127 19:12:10.319486 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:12:10 crc kubenswrapper[4776]: I0127 19:12:10.319553 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:12:10 crc kubenswrapper[4776]: E0127 19:12:10.319726 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:12:10 crc kubenswrapper[4776]: E0127 19:12:10.320107 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:12:10 crc kubenswrapper[4776]: I0127 19:12:10.381817 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-wmmtr_255485b5-2151-4328-b925-c2e1a1f95e1f/kube-multus/1.log" Jan 27 19:12:10 crc kubenswrapper[4776]: I0127 19:12:10.381900 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-wmmtr" event={"ID":"255485b5-2151-4328-b925-c2e1a1f95e1f","Type":"ContainerStarted","Data":"6ec106ec3e41bd2184b9b45ff47cbee7c52624ff2498a572a4304323d36dfe7b"} Jan 27 19:12:11 crc kubenswrapper[4776]: I0127 19:12:11.319493 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:12:11 crc kubenswrapper[4776]: E0127 19:12:11.319626 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:12:12 crc kubenswrapper[4776]: I0127 19:12:12.319495 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:12:12 crc kubenswrapper[4776]: I0127 19:12:12.319570 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:12:12 crc kubenswrapper[4776]: I0127 19:12:12.319505 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:12:12 crc kubenswrapper[4776]: E0127 19:12:12.319759 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-t7tbg" podUID="28a63e45-994f-41fe-82fb-605486d13b98" Jan 27 19:12:12 crc kubenswrapper[4776]: E0127 19:12:12.319889 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 27 19:12:12 crc kubenswrapper[4776]: E0127 19:12:12.319967 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 27 19:12:13 crc kubenswrapper[4776]: I0127 19:12:13.319194 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:12:13 crc kubenswrapper[4776]: E0127 19:12:13.319317 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 27 19:12:14 crc kubenswrapper[4776]: I0127 19:12:14.319082 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:12:14 crc kubenswrapper[4776]: I0127 19:12:14.319133 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:12:14 crc kubenswrapper[4776]: I0127 19:12:14.319146 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:12:14 crc kubenswrapper[4776]: I0127 19:12:14.321597 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 27 19:12:14 crc kubenswrapper[4776]: I0127 19:12:14.321604 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 27 19:12:14 crc kubenswrapper[4776]: I0127 19:12:14.322406 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 27 19:12:14 crc kubenswrapper[4776]: I0127 19:12:14.324586 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 27 19:12:14 crc kubenswrapper[4776]: I0127 19:12:14.324853 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 27 19:12:14 crc kubenswrapper[4776]: I0127 19:12:14.326898 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 27 19:12:15 crc kubenswrapper[4776]: I0127 19:12:15.318792 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.429388 4776 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.466213 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-9xx7d"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.466683 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rnpjf"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.466901 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.467195 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.468025 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.468569 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.470348 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.470896 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.473449 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.474026 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.474409 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-tzpjt"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.484142 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.484502 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.484690 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.484883 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.485228 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.485411 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.485625 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.485875 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.486071 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.486122 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.486250 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.486409 4776 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.486597 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.486881 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.486909 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.487550 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.491132 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.491425 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.491611 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.492191 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.492406 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.494000 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.494181 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.495344 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.495730 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.496312 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.500594 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.500794 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.501707 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.503928 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.504224 4776 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-apiserver"/"serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.504358 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.507848 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.508326 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.508645 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.508990 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.509171 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.509232 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.509273 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.509449 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.509610 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.509797 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.509851 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.509888 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.509986 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.510090 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.510238 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.510380 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.510780 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.510990 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.511214 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.511879 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.512396 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-894r8"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.512879 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.515807 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.516469 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dghnj"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.516707 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.516877 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.517925 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-n8wxr"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.518080 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.518160 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.518230 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.518397 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.518714 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.518742 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.518810 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.523902 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.524355 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.529858 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.530070 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.530376 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.530418 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.530476 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.530583 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.530674 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.530975 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.531175 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.530625 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.531413 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.540623 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.540934 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 27 19:12:18 crc 
kubenswrapper[4776]: I0127 19:12:18.540990 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.541125 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.541174 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.541320 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.541355 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.541420 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.541496 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.541613 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.541647 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.541939 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.542047 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wkkxn"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.542092 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.542198 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.543021 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkkxn" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.543742 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p4pr2"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.548262 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.550964 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.551934 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-p4pr2" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.557263 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.558239 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.558667 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.558693 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.558878 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.558950 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.558969 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.559501 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.560383 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.566142 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.566158 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.566309 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.566313 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.566545 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.566623 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.566744 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.566858 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.566862 4776 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.567058 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-b9hnh"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.567498 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.568525 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-cdd4p"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.568591 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.569045 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-cdd4p" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.570585 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.571018 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.571156 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.571304 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.571339 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.571650 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.574308 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.576483 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.576843 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.578063 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-vnp9r"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.578633 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.579128 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-7pq2b"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.579932 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-7pq2b" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.580144 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zh827"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.580662 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.581887 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.582313 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.584835 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.585281 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.591994 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.592589 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.592919 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.593056 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.593568 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.600203 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.600892 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601401 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-trusted-ca-bundle\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601429 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c944923b-00f1-4f4f-a883-aeb69d86c343-serving-cert\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601478 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d44v\" (UniqueName: \"kubernetes.io/projected/61b154b9-1f8d-461c-afa3-0c5204d5084f-kube-api-access-4d44v\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601498 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr4ws\" (UniqueName: \"kubernetes.io/projected/c944923b-00f1-4f4f-a883-aeb69d86c343-kube-api-access-sr4ws\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601547 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4tqk\" (UniqueName: \"kubernetes.io/projected/db05b4c4-16ee-4a32-ac8c-eefda506c6db-kube-api-access-h4tqk\") pod \"machine-api-operator-5694c8668f-tzpjt\" (UID: \"db05b4c4-16ee-4a32-ac8c-eefda506c6db\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601602 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5jts\" (UniqueName: \"kubernetes.io/projected/dafb70f2-b919-42f3-9f40-9f6183db8369-kube-api-access-x5jts\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601643 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dfecd87d-0e25-4116-ae11-72e607a011dd-serving-cert\") pod \"route-controller-manager-6576b87f9c-cxv6m\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601676 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-client-ca\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601713 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvjjz\" (UniqueName: \"kubernetes.io/projected/68c010e7-6718-48f9-b901-f6047901b8b2-kube-api-access-dvjjz\") pod \"openshift-config-operator-7777fb866f-p5zwn\" (UID: \"68c010e7-6718-48f9-b901-f6047901b8b2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601744 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmwm9\" (UniqueName: \"kubernetes.io/projected/dfecd87d-0e25-4116-ae11-72e607a011dd-kube-api-access-pmwm9\") pod \"route-controller-manager-6576b87f9c-cxv6m\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601760 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dafb70f2-b919-42f3-9f40-9f6183db8369-etcd-client\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601788 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/db05b4c4-16ee-4a32-ac8c-eefda506c6db-images\") pod \"machine-api-operator-5694c8668f-tzpjt\" (UID: \"db05b4c4-16ee-4a32-ac8c-eefda506c6db\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601804 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db05b4c4-16ee-4a32-ac8c-eefda506c6db-config\") pod \"machine-api-operator-5694c8668f-tzpjt\" (UID: \"db05b4c4-16ee-4a32-ac8c-eefda506c6db\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601829 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dfecd87d-0e25-4116-ae11-72e607a011dd-client-ca\") pod \"route-controller-manager-6576b87f9c-cxv6m\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601850 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/61b154b9-1f8d-461c-afa3-0c5204d5084f-audit-policies\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601874 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-config\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc 
kubenswrapper[4776]: I0127 19:12:18.601891 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68c010e7-6718-48f9-b901-f6047901b8b2-serving-cert\") pod \"openshift-config-operator-7777fb866f-p5zwn\" (UID: \"68c010e7-6718-48f9-b901-f6047901b8b2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.601955 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61b154b9-1f8d-461c-afa3-0c5204d5084f-serving-cert\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602076 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602219 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/61b154b9-1f8d-461c-afa3-0c5204d5084f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602283 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/dafb70f2-b919-42f3-9f40-9f6183db8369-node-pullsecrets\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602310 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-audit\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602327 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-image-import-ca\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602355 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dafb70f2-b919-42f3-9f40-9f6183db8369-serving-cert\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602372 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/61b154b9-1f8d-461c-afa3-0c5204d5084f-audit-dir\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 
19:12:18.602422 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/61b154b9-1f8d-461c-afa3-0c5204d5084f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602448 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-etcd-serving-ca\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602465 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/dafb70f2-b919-42f3-9f40-9f6183db8369-encryption-config\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602480 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/61b154b9-1f8d-461c-afa3-0c5204d5084f-etcd-client\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602498 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfecd87d-0e25-4116-ae11-72e607a011dd-config\") pod \"route-controller-manager-6576b87f9c-cxv6m\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602530 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dafb70f2-b919-42f3-9f40-9f6183db8369-audit-dir\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602549 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602563 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/68c010e7-6718-48f9-b901-f6047901b8b2-available-featuregates\") pod \"openshift-config-operator-7777fb866f-p5zwn\" (UID: \"68c010e7-6718-48f9-b901-f6047901b8b2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602595 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-config\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602639 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/db05b4c4-16ee-4a32-ac8c-eefda506c6db-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-tzpjt\" (UID: \"db05b4c4-16ee-4a32-ac8c-eefda506c6db\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602663 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/61b154b9-1f8d-461c-afa3-0c5204d5084f-encryption-config\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.602706 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.612635 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.613190 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-r869q"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.613563 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.615911 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.617444 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.623918 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.625413 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.625672 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.636917 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tqlbs"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.617459 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.617694 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.638410 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.639314 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fkb4r"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.639926 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.640205 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.640385 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.640899 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.641473 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.643524 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.654012 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.655640 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.663737 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-rkkzb"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.664522 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.664635 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-rkkzb" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.665563 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-9xx7d"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.670813 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.670848 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rnpjf"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.675178 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.683090 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.684604 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wkkxn"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.684707 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.685435 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.686828 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.687935 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.691443 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.692118 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dghnj"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.694927 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-n8wxr"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.695215 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-vnp9r"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.695457 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.704489 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705581 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db05b4c4-16ee-4a32-ac8c-eefda506c6db-config\") pod \"machine-api-operator-5694c8668f-tzpjt\" (UID: \"db05b4c4-16ee-4a32-ac8c-eefda506c6db\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:18 crc 
kubenswrapper[4776]: I0127 19:12:18.705624 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdpdf\" (UniqueName: \"kubernetes.io/projected/d2485845-4359-4e05-9523-8e5e4a40d827-kube-api-access-kdpdf\") pod \"migrator-59844c95c7-wkkxn\" (UID: \"d2485845-4359-4e05-9523-8e5e4a40d827\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkkxn" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705643 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hng2h\" (UniqueName: \"kubernetes.io/projected/9e2f4f0c-a78b-40dc-855f-a411b72183c5-kube-api-access-hng2h\") pod \"downloads-7954f5f757-cdd4p\" (UID: \"9e2f4f0c-a78b-40dc-855f-a411b72183c5\") " pod="openshift-console/downloads-7954f5f757-cdd4p" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705660 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16630e9b-9226-44d4-86e9-48fda67033fe-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705675 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4aa9ef2-3d89-409d-97a7-3990f846504a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-sp9rh\" (UID: \"c4aa9ef2-3d89-409d-97a7-3990f846504a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705693 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705725 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4aa9ef2-3d89-409d-97a7-3990f846504a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-sp9rh\" (UID: \"c4aa9ef2-3d89-409d-97a7-3990f846504a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705743 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1353bef9-e9b7-4242-afc2-1ff34d4da935-serving-cert\") pod \"console-operator-58897d9998-894r8\" (UID: \"1353bef9-e9b7-4242-afc2-1ff34d4da935\") " pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705756 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705819 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnv7c\" (UniqueName: \"kubernetes.io/projected/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-kube-api-access-tnv7c\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705839 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dfecd87d-0e25-4116-ae11-72e607a011dd-client-ca\") pod \"route-controller-manager-6576b87f9c-cxv6m\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705856 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705871 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705885 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p4pr2\" (UID: \"e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p4pr2" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705902 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62831e09-6067-4270-be11-d251956dffa2-config\") pod \"openshift-apiserver-operator-796bbdcf4f-f4dn6\" (UID: \"62831e09-6067-4270-be11-d251956dffa2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705918 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/53042356-fe0e-4cd4-ba21-301a7040c408-trusted-ca\") pod \"ingress-operator-5b745b69d9-k4vpf\" (UID: \"53042356-fe0e-4cd4-ba21-301a7040c408\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705934 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/61b154b9-1f8d-461c-afa3-0c5204d5084f-audit-policies\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705949 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62831e09-6067-4270-be11-d251956dffa2-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-f4dn6\" (UID: \"62831e09-6067-4270-be11-d251956dffa2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705967 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qllm\" (UniqueName: \"kubernetes.io/projected/f0d8de85-33c3-4bd0-9589-f140ba0b77c4-kube-api-access-4qllm\") pod \"openshift-controller-manager-operator-756b6f6bc6-dvh8z\" (UID: \"f0d8de85-33c3-4bd0-9589-f140ba0b77c4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.705986 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwbkd\" (UniqueName: \"kubernetes.io/projected/c9b30cf2-d2a9-4e2c-896d-60a54109e9ee-kube-api-access-jwbkd\") pod \"cluster-samples-operator-665b6dd947-bktbd\" (UID: \"c9b30cf2-d2a9-4e2c-896d-60a54109e9ee\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706004 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsjnw\" (UniqueName: \"kubernetes.io/projected/256c7026-dc32-4fd8-9250-608d1b5de8bf-kube-api-access-jsjnw\") pod \"dns-operator-744455d44c-7pq2b\" (UID: \"256c7026-dc32-4fd8-9250-608d1b5de8bf\") " pod="openshift-dns-operator/dns-operator-744455d44c-7pq2b" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706021 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/039a9234-6e8f-4854-b694-478409aacf35-console-oauth-config\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706037 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gr8gz\" (UniqueName: \"kubernetes.io/projected/1353bef9-e9b7-4242-afc2-1ff34d4da935-kube-api-access-gr8gz\") pod \"console-operator-58897d9998-894r8\" (UID: \"1353bef9-e9b7-4242-afc2-1ff34d4da935\") " pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706058 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-config\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706075 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68c010e7-6718-48f9-b901-f6047901b8b2-serving-cert\") pod \"openshift-config-operator-7777fb866f-p5zwn\" (UID: \"68c010e7-6718-48f9-b901-f6047901b8b2\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706090 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46a581f4-765e-458d-866b-fe1803351969-config\") pod \"kube-controller-manager-operator-78b949d7b-49rr2\" (UID: \"46a581f4-765e-458d-866b-fe1803351969\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706105 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4aa9ef2-3d89-409d-97a7-3990f846504a-config\") pod \"kube-apiserver-operator-766d6c64bb-sp9rh\" (UID: \"c4aa9ef2-3d89-409d-97a7-3990f846504a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706128 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61b154b9-1f8d-461c-afa3-0c5204d5084f-serving-cert\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706143 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/61b154b9-1f8d-461c-afa3-0c5204d5084f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706160 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gglkw\" (UniqueName: \"kubernetes.io/projected/152fa2df-4029-4e25-aa4e-98005cac6226-kube-api-access-gglkw\") pod \"kube-storage-version-migrator-operator-b67b599dd-j9lwf\" (UID: \"152fa2df-4029-4e25-aa4e-98005cac6226\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706176 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706192 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kljfj\" (UniqueName: \"kubernetes.io/projected/039a9234-6e8f-4854-b694-478409aacf35-kube-api-access-kljfj\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706207 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: 
\"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706223 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmvqp\" (UniqueName: \"kubernetes.io/projected/6c7a556d-56ac-4392-8f4e-c2c824ab5e90-kube-api-access-hmvqp\") pod \"machine-config-operator-74547568cd-bqsqh\" (UID: \"6c7a556d-56ac-4392-8f4e-c2c824ab5e90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706238 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/dafb70f2-b919-42f3-9f40-9f6183db8369-node-pullsecrets\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706253 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-audit\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706267 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-image-import-ca\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706282 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dafb70f2-b919-42f3-9f40-9f6183db8369-serving-cert\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706295 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/61b154b9-1f8d-461c-afa3-0c5204d5084f-audit-dir\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706319 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-default-certificate\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706337 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/61b154b9-1f8d-461c-afa3-0c5204d5084f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706352 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/46a581f4-765e-458d-866b-fe1803351969-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-49rr2\" (UID: \"46a581f4-765e-458d-866b-fe1803351969\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706369 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706384 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/53042356-fe0e-4cd4-ba21-301a7040c408-metrics-tls\") pod \"ingress-operator-5b745b69d9-k4vpf\" (UID: \"53042356-fe0e-4cd4-ba21-301a7040c408\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706399 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/53042356-fe0e-4cd4-ba21-301a7040c408-bound-sa-token\") pod \"ingress-operator-5b745b69d9-k4vpf\" (UID: \"53042356-fe0e-4cd4-ba21-301a7040c408\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706412 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p46dc\" (UniqueName: \"kubernetes.io/projected/53042356-fe0e-4cd4-ba21-301a7040c408-kube-api-access-p46dc\") pod \"ingress-operator-5b745b69d9-k4vpf\" (UID: \"53042356-fe0e-4cd4-ba21-301a7040c408\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706429 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706452 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-etcd-serving-ca\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706475 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vk2t\" (UniqueName: \"kubernetes.io/projected/5991ed3f-a5bf-4586-918a-b79afd2d8c27-kube-api-access-5vk2t\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706495 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/16630e9b-9226-44d4-86e9-48fda67033fe-serving-cert\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706985 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p4pr2"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.707038 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.706997 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/dafb70f2-b919-42f3-9f40-9f6183db8369-encryption-config\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.707108 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/61b154b9-1f8d-461c-afa3-0c5204d5084f-etcd-client\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.708327 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfecd87d-0e25-4116-ae11-72e607a011dd-config\") pod \"route-controller-manager-6576b87f9c-cxv6m\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.708398 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/039a9234-6e8f-4854-b694-478409aacf35-trusted-ca-bundle\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.708483 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-metrics-certs\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.708508 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/de3063f6-acdd-4c62-9d94-d5dd82b0fda1-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-m657q\" (UID: \"de3063f6-acdd-4c62-9d94-d5dd82b0fda1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.708533 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/152fa2df-4029-4e25-aa4e-98005cac6226-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-j9lwf\" (UID: 
\"152fa2df-4029-4e25-aa4e-98005cac6226\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.708893 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9h2v4\" (UniqueName: \"kubernetes.io/projected/52f1c1ca-0b7a-4d48-b967-1d4525e220e1-kube-api-access-9h2v4\") pod \"catalog-operator-68c6474976-2qjhm\" (UID: \"52f1c1ca-0b7a-4d48-b967-1d4525e220e1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.708920 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6c7a556d-56ac-4392-8f4e-c2c824ab5e90-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bqsqh\" (UID: \"6c7a556d-56ac-4392-8f4e-c2c824ab5e90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.708937 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-service-ca-bundle\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.708958 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dafb70f2-b919-42f3-9f40-9f6183db8369-audit-dir\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.708976 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.708994 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/68c010e7-6718-48f9-b901-f6047901b8b2-available-featuregates\") pod \"openshift-config-operator-7777fb866f-p5zwn\" (UID: \"68c010e7-6718-48f9-b901-f6047901b8b2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709030 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-config\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709055 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/039a9234-6e8f-4854-b694-478409aacf35-service-ca\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " 
pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709071 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6c7a556d-56ac-4392-8f4e-c2c824ab5e90-images\") pod \"machine-config-operator-74547568cd-bqsqh\" (UID: \"6c7a556d-56ac-4392-8f4e-c2c824ab5e90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709088 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/db05b4c4-16ee-4a32-ac8c-eefda506c6db-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-tzpjt\" (UID: \"db05b4c4-16ee-4a32-ac8c-eefda506c6db\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709108 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/61b154b9-1f8d-461c-afa3-0c5204d5084f-encryption-config\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709125 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16630e9b-9226-44d4-86e9-48fda67033fe-config\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709142 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/33ba45e2-c084-450f-822d-2326de41ad36-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-dfrfs\" (UID: \"33ba45e2-c084-450f-822d-2326de41ad36\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709159 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-trusted-ca-bundle\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709174 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c944923b-00f1-4f4f-a883-aeb69d86c343-serving-cert\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709191 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a01b7ee4-02a1-4b53-81bf-7367b7e025a5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nvrdm\" (UID: \"a01b7ee4-02a1-4b53-81bf-7367b7e025a5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" Jan 27 
19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709213 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d44v\" (UniqueName: \"kubernetes.io/projected/61b154b9-1f8d-461c-afa3-0c5204d5084f-kube-api-access-4d44v\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709233 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/52f1c1ca-0b7a-4d48-b967-1d4525e220e1-srv-cert\") pod \"catalog-operator-68c6474976-2qjhm\" (UID: \"52f1c1ca-0b7a-4d48-b967-1d4525e220e1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709249 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c9b30cf2-d2a9-4e2c-896d-60a54109e9ee-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-bktbd\" (UID: \"c9b30cf2-d2a9-4e2c-896d-60a54109e9ee\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709266 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr4ws\" (UniqueName: \"kubernetes.io/projected/c944923b-00f1-4f4f-a883-aeb69d86c343-kube-api-access-sr4ws\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709282 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4tqk\" (UniqueName: \"kubernetes.io/projected/db05b4c4-16ee-4a32-ac8c-eefda506c6db-kube-api-access-h4tqk\") pod \"machine-api-operator-5694c8668f-tzpjt\" (UID: \"db05b4c4-16ee-4a32-ac8c-eefda506c6db\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709298 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-stats-auth\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709315 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46a581f4-765e-458d-866b-fe1803351969-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-49rr2\" (UID: \"46a581f4-765e-458d-866b-fe1803351969\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709332 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhncd\" (UniqueName: \"kubernetes.io/projected/e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9-kube-api-access-lhncd\") pod \"multus-admission-controller-857f4d67dd-p4pr2\" (UID: \"e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p4pr2" Jan 27 19:12:18 crc 
kubenswrapper[4776]: I0127 19:12:18.709348 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/256c7026-dc32-4fd8-9250-608d1b5de8bf-metrics-tls\") pod \"dns-operator-744455d44c-7pq2b\" (UID: \"256c7026-dc32-4fd8-9250-608d1b5de8bf\") " pod="openshift-dns-operator/dns-operator-744455d44c-7pq2b" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709363 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0d8de85-33c3-4bd0-9589-f140ba0b77c4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-dvh8z\" (UID: \"f0d8de85-33c3-4bd0-9589-f140ba0b77c4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709379 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/039a9234-6e8f-4854-b694-478409aacf35-console-serving-cert\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709396 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5jts\" (UniqueName: \"kubernetes.io/projected/dafb70f2-b919-42f3-9f40-9f6183db8369-kube-api-access-x5jts\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709411 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0d8de85-33c3-4bd0-9589-f140ba0b77c4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-dvh8z\" (UID: \"f0d8de85-33c3-4bd0-9589-f140ba0b77c4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709426 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/039a9234-6e8f-4854-b694-478409aacf35-oauth-serving-cert\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709432 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-tzpjt"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709447 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/33ba45e2-c084-450f-822d-2326de41ad36-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-dfrfs\" (UID: \"33ba45e2-c084-450f-822d-2326de41ad36\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709537 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dfecd87d-0e25-4116-ae11-72e607a011dd-serving-cert\") pod \"route-controller-manager-6576b87f9c-cxv6m\" (UID: 
\"dfecd87d-0e25-4116-ae11-72e607a011dd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709574 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a01b7ee4-02a1-4b53-81bf-7367b7e025a5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nvrdm\" (UID: \"a01b7ee4-02a1-4b53-81bf-7367b7e025a5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709606 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-client-ca\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709634 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-audit-policies\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709658 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgmtm\" (UniqueName: \"kubernetes.io/projected/de3063f6-acdd-4c62-9d94-d5dd82b0fda1-kube-api-access-cgmtm\") pod \"package-server-manager-789f6589d5-m657q\" (UID: \"de3063f6-acdd-4c62-9d94-d5dd82b0fda1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709674 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/33ba45e2-c084-450f-822d-2326de41ad36-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-dfrfs\" (UID: \"33ba45e2-c084-450f-822d-2326de41ad36\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709690 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6lbg\" (UniqueName: \"kubernetes.io/projected/33ba45e2-c084-450f-822d-2326de41ad36-kube-api-access-x6lbg\") pod \"cluster-image-registry-operator-dc59b4c8b-dfrfs\" (UID: \"33ba45e2-c084-450f-822d-2326de41ad36\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709710 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/152fa2df-4029-4e25-aa4e-98005cac6226-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-j9lwf\" (UID: \"152fa2df-4029-4e25-aa4e-98005cac6226\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709727 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/1353bef9-e9b7-4242-afc2-1ff34d4da935-trusted-ca\") pod \"console-operator-58897d9998-894r8\" (UID: \"1353bef9-e9b7-4242-afc2-1ff34d4da935\") " pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709742 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a01b7ee4-02a1-4b53-81bf-7367b7e025a5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nvrdm\" (UID: \"a01b7ee4-02a1-4b53-81bf-7367b7e025a5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709758 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5991ed3f-a5bf-4586-918a-b79afd2d8c27-audit-dir\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709812 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvjjz\" (UniqueName: \"kubernetes.io/projected/68c010e7-6718-48f9-b901-f6047901b8b2-kube-api-access-dvjjz\") pod \"openshift-config-operator-7777fb866f-p5zwn\" (UID: \"68c010e7-6718-48f9-b901-f6047901b8b2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709830 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709848 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709869 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709892 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6c7a556d-56ac-4392-8f4e-c2c824ab5e90-proxy-tls\") pod \"machine-config-operator-74547568cd-bqsqh\" (UID: \"6c7a556d-56ac-4392-8f4e-c2c824ab5e90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709914 4776 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbf2p\" (UniqueName: \"kubernetes.io/projected/62831e09-6067-4270-be11-d251956dffa2-kube-api-access-hbf2p\") pod \"openshift-apiserver-operator-796bbdcf4f-f4dn6\" (UID: \"62831e09-6067-4270-be11-d251956dffa2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709930 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/039a9234-6e8f-4854-b694-478409aacf35-console-config\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709957 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16630e9b-9226-44d4-86e9-48fda67033fe-service-ca-bundle\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709980 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmwm9\" (UniqueName: \"kubernetes.io/projected/dfecd87d-0e25-4116-ae11-72e607a011dd-kube-api-access-pmwm9\") pod \"route-controller-manager-6576b87f9c-cxv6m\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.709998 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/52f1c1ca-0b7a-4d48-b967-1d4525e220e1-profile-collector-cert\") pod \"catalog-operator-68c6474976-2qjhm\" (UID: \"52f1c1ca-0b7a-4d48-b967-1d4525e220e1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.710015 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1353bef9-e9b7-4242-afc2-1ff34d4da935-config\") pod \"console-operator-58897d9998-894r8\" (UID: \"1353bef9-e9b7-4242-afc2-1ff34d4da935\") " pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.710030 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt6gt\" (UniqueName: \"kubernetes.io/projected/16630e9b-9226-44d4-86e9-48fda67033fe-kube-api-access-kt6gt\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.710049 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dafb70f2-b919-42f3-9f40-9f6183db8369-etcd-client\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.710065 4776 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/db05b4c4-16ee-4a32-ac8c-eefda506c6db-images\") pod \"machine-api-operator-5694c8668f-tzpjt\" (UID: \"db05b4c4-16ee-4a32-ac8c-eefda506c6db\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.710874 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/db05b4c4-16ee-4a32-ac8c-eefda506c6db-images\") pod \"machine-api-operator-5694c8668f-tzpjt\" (UID: \"db05b4c4-16ee-4a32-ac8c-eefda506c6db\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.713285 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfecd87d-0e25-4116-ae11-72e607a011dd-config\") pod \"route-controller-manager-6576b87f9c-cxv6m\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.713411 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dafb70f2-b919-42f3-9f40-9f6183db8369-audit-dir\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.714372 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/68c010e7-6718-48f9-b901-f6047901b8b2-available-featuregates\") pod \"openshift-config-operator-7777fb866f-p5zwn\" (UID: \"68c010e7-6718-48f9-b901-f6047901b8b2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.715441 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-trusted-ca-bundle\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.715842 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/61b154b9-1f8d-461c-afa3-0c5204d5084f-etcd-client\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.715963 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/61b154b9-1f8d-461c-afa3-0c5204d5084f-audit-dir\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.716342 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/61b154b9-1f8d-461c-afa3-0c5204d5084f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 
19:12:18.716740 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-config\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.716942 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/61b154b9-1f8d-461c-afa3-0c5204d5084f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.717085 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db05b4c4-16ee-4a32-ac8c-eefda506c6db-config\") pod \"machine-api-operator-5694c8668f-tzpjt\" (UID: \"db05b4c4-16ee-4a32-ac8c-eefda506c6db\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.717172 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/dafb70f2-b919-42f3-9f40-9f6183db8369-node-pullsecrets\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.717224 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-audit\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.717440 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.717532 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dfecd87d-0e25-4116-ae11-72e607a011dd-client-ca\") pod \"route-controller-manager-6576b87f9c-cxv6m\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.718097 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-client-ca\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.718653 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.719200 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/61b154b9-1f8d-461c-afa3-0c5204d5084f-audit-policies\") pod 
\"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.719386 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-config\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.719419 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-etcd-serving-ca\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.719605 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/61b154b9-1f8d-461c-afa3-0c5204d5084f-encryption-config\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.719955 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c944923b-00f1-4f4f-a883-aeb69d86c343-serving-cert\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.720583 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/dafb70f2-b919-42f3-9f40-9f6183db8369-image-import-ca\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.722483 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/dafb70f2-b919-42f3-9f40-9f6183db8369-encryption-config\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.723092 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68c010e7-6718-48f9-b901-f6047901b8b2-serving-cert\") pod \"openshift-config-operator-7777fb866f-p5zwn\" (UID: \"68c010e7-6718-48f9-b901-f6047901b8b2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.723147 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/dafb70f2-b919-42f3-9f40-9f6183db8369-etcd-client\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.723190 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61b154b9-1f8d-461c-afa3-0c5204d5084f-serving-cert\") pod 
\"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.723761 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dfecd87d-0e25-4116-ae11-72e607a011dd-serving-cert\") pod \"route-controller-manager-6576b87f9c-cxv6m\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.725995 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dafb70f2-b919-42f3-9f40-9f6183db8369-serving-cert\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.732055 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.738298 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/db05b4c4-16ee-4a32-ac8c-eefda506c6db-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-tzpjt\" (UID: \"db05b4c4-16ee-4a32-ac8c-eefda506c6db\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.739391 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-894r8"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.740878 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-7pq2b"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.742297 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.743875 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-r869q"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.744826 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.745802 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.747004 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.748068 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.749050 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.749105 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-r28v4"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.750141 4776 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.750325 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.751482 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-zszr6"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.752108 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-zszr6" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.752817 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-cdd4p"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.754158 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.755662 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fkb4r"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.756962 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tqlbs"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.758380 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zh827"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.759147 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.760988 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.762269 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.763503 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-zszr6"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.764590 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-rkkzb"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.765902 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-r28v4"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.766609 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.767560 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-pz2qz"] Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.769749 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.769878 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-pz2qz" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.789648 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.810916 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.811635 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-stats-auth\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.811737 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46a581f4-765e-458d-866b-fe1803351969-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-49rr2\" (UID: \"46a581f4-765e-458d-866b-fe1803351969\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.811808 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhncd\" (UniqueName: \"kubernetes.io/projected/e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9-kube-api-access-lhncd\") pod \"multus-admission-controller-857f4d67dd-p4pr2\" (UID: \"e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p4pr2" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.811856 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/256c7026-dc32-4fd8-9250-608d1b5de8bf-metrics-tls\") pod \"dns-operator-744455d44c-7pq2b\" (UID: \"256c7026-dc32-4fd8-9250-608d1b5de8bf\") " pod="openshift-dns-operator/dns-operator-744455d44c-7pq2b" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.811899 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0d8de85-33c3-4bd0-9589-f140ba0b77c4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-dvh8z\" (UID: \"f0d8de85-33c3-4bd0-9589-f140ba0b77c4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.811932 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/039a9234-6e8f-4854-b694-478409aacf35-console-serving-cert\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.812060 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0d8de85-33c3-4bd0-9589-f140ba0b77c4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-dvh8z\" (UID: \"f0d8de85-33c3-4bd0-9589-f140ba0b77c4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.812103 4776 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/039a9234-6e8f-4854-b694-478409aacf35-oauth-serving-cert\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.812160 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/33ba45e2-c084-450f-822d-2326de41ad36-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-dfrfs\" (UID: \"33ba45e2-c084-450f-822d-2326de41ad36\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.812229 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a01b7ee4-02a1-4b53-81bf-7367b7e025a5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nvrdm\" (UID: \"a01b7ee4-02a1-4b53-81bf-7367b7e025a5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.812344 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-audit-policies\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.812456 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgmtm\" (UniqueName: \"kubernetes.io/projected/de3063f6-acdd-4c62-9d94-d5dd82b0fda1-kube-api-access-cgmtm\") pod \"package-server-manager-789f6589d5-m657q\" (UID: \"de3063f6-acdd-4c62-9d94-d5dd82b0fda1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.812496 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/33ba45e2-c084-450f-822d-2326de41ad36-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-dfrfs\" (UID: \"33ba45e2-c084-450f-822d-2326de41ad36\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.812595 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6lbg\" (UniqueName: \"kubernetes.io/projected/33ba45e2-c084-450f-822d-2326de41ad36-kube-api-access-x6lbg\") pod \"cluster-image-registry-operator-dc59b4c8b-dfrfs\" (UID: \"33ba45e2-c084-450f-822d-2326de41ad36\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.812644 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/152fa2df-4029-4e25-aa4e-98005cac6226-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-j9lwf\" (UID: \"152fa2df-4029-4e25-aa4e-98005cac6226\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.812684 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1353bef9-e9b7-4242-afc2-1ff34d4da935-trusted-ca\") pod \"console-operator-58897d9998-894r8\" (UID: \"1353bef9-e9b7-4242-afc2-1ff34d4da935\") " pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.812723 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a01b7ee4-02a1-4b53-81bf-7367b7e025a5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nvrdm\" (UID: \"a01b7ee4-02a1-4b53-81bf-7367b7e025a5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.812841 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5991ed3f-a5bf-4586-918a-b79afd2d8c27-audit-dir\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813009 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813084 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813159 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813226 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6c7a556d-56ac-4392-8f4e-c2c824ab5e90-proxy-tls\") pod \"machine-config-operator-74547568cd-bqsqh\" (UID: \"6c7a556d-56ac-4392-8f4e-c2c824ab5e90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813277 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbf2p\" (UniqueName: \"kubernetes.io/projected/62831e09-6067-4270-be11-d251956dffa2-kube-api-access-hbf2p\") pod \"openshift-apiserver-operator-796bbdcf4f-f4dn6\" (UID: \"62831e09-6067-4270-be11-d251956dffa2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813377 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/039a9234-6e8f-4854-b694-478409aacf35-console-config\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813438 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16630e9b-9226-44d4-86e9-48fda67033fe-service-ca-bundle\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813514 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/52f1c1ca-0b7a-4d48-b967-1d4525e220e1-profile-collector-cert\") pod \"catalog-operator-68c6474976-2qjhm\" (UID: \"52f1c1ca-0b7a-4d48-b967-1d4525e220e1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813560 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1353bef9-e9b7-4242-afc2-1ff34d4da935-config\") pod \"console-operator-58897d9998-894r8\" (UID: \"1353bef9-e9b7-4242-afc2-1ff34d4da935\") " pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813569 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a01b7ee4-02a1-4b53-81bf-7367b7e025a5-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nvrdm\" (UID: \"a01b7ee4-02a1-4b53-81bf-7367b7e025a5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813599 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt6gt\" (UniqueName: \"kubernetes.io/projected/16630e9b-9226-44d4-86e9-48fda67033fe-kube-api-access-kt6gt\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813733 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdpdf\" (UniqueName: \"kubernetes.io/projected/d2485845-4359-4e05-9523-8e5e4a40d827-kube-api-access-kdpdf\") pod \"migrator-59844c95c7-wkkxn\" (UID: \"d2485845-4359-4e05-9523-8e5e4a40d827\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkkxn" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813801 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hng2h\" (UniqueName: \"kubernetes.io/projected/9e2f4f0c-a78b-40dc-855f-a411b72183c5-kube-api-access-hng2h\") pod \"downloads-7954f5f757-cdd4p\" (UID: \"9e2f4f0c-a78b-40dc-855f-a411b72183c5\") " pod="openshift-console/downloads-7954f5f757-cdd4p" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813869 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16630e9b-9226-44d4-86e9-48fda67033fe-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: 
\"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.813942 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4aa9ef2-3d89-409d-97a7-3990f846504a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-sp9rh\" (UID: \"c4aa9ef2-3d89-409d-97a7-3990f846504a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814043 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814147 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4aa9ef2-3d89-409d-97a7-3990f846504a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-sp9rh\" (UID: \"c4aa9ef2-3d89-409d-97a7-3990f846504a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814241 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1353bef9-e9b7-4242-afc2-1ff34d4da935-serving-cert\") pod \"console-operator-58897d9998-894r8\" (UID: \"1353bef9-e9b7-4242-afc2-1ff34d4da935\") " pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814315 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814355 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnv7c\" (UniqueName: \"kubernetes.io/projected/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-kube-api-access-tnv7c\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814397 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814464 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814559 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p4pr2\" (UID: \"e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p4pr2" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814663 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62831e09-6067-4270-be11-d251956dffa2-config\") pod \"openshift-apiserver-operator-796bbdcf4f-f4dn6\" (UID: \"62831e09-6067-4270-be11-d251956dffa2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814709 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/53042356-fe0e-4cd4-ba21-301a7040c408-trusted-ca\") pod \"ingress-operator-5b745b69d9-k4vpf\" (UID: \"53042356-fe0e-4cd4-ba21-301a7040c408\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814808 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62831e09-6067-4270-be11-d251956dffa2-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-f4dn6\" (UID: \"62831e09-6067-4270-be11-d251956dffa2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814871 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qllm\" (UniqueName: \"kubernetes.io/projected/f0d8de85-33c3-4bd0-9589-f140ba0b77c4-kube-api-access-4qllm\") pod \"openshift-controller-manager-operator-756b6f6bc6-dvh8z\" (UID: \"f0d8de85-33c3-4bd0-9589-f140ba0b77c4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814913 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwbkd\" (UniqueName: \"kubernetes.io/projected/c9b30cf2-d2a9-4e2c-896d-60a54109e9ee-kube-api-access-jwbkd\") pod \"cluster-samples-operator-665b6dd947-bktbd\" (UID: \"c9b30cf2-d2a9-4e2c-896d-60a54109e9ee\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814954 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsjnw\" (UniqueName: \"kubernetes.io/projected/256c7026-dc32-4fd8-9250-608d1b5de8bf-kube-api-access-jsjnw\") pod \"dns-operator-744455d44c-7pq2b\" (UID: \"256c7026-dc32-4fd8-9250-608d1b5de8bf\") " pod="openshift-dns-operator/dns-operator-744455d44c-7pq2b" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.815100 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-stats-auth\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 
19:12:18.815641 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0d8de85-33c3-4bd0-9589-f140ba0b77c4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-dvh8z\" (UID: \"f0d8de85-33c3-4bd0-9589-f140ba0b77c4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.815110 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/039a9234-6e8f-4854-b694-478409aacf35-console-oauth-config\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817116 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gr8gz\" (UniqueName: \"kubernetes.io/projected/1353bef9-e9b7-4242-afc2-1ff34d4da935-kube-api-access-gr8gz\") pod \"console-operator-58897d9998-894r8\" (UID: \"1353bef9-e9b7-4242-afc2-1ff34d4da935\") " pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817166 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46a581f4-765e-458d-866b-fe1803351969-config\") pod \"kube-controller-manager-operator-78b949d7b-49rr2\" (UID: \"46a581f4-765e-458d-866b-fe1803351969\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817209 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4aa9ef2-3d89-409d-97a7-3990f846504a-config\") pod \"kube-apiserver-operator-766d6c64bb-sp9rh\" (UID: \"c4aa9ef2-3d89-409d-97a7-3990f846504a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817293 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gglkw\" (UniqueName: \"kubernetes.io/projected/152fa2df-4029-4e25-aa4e-98005cac6226-kube-api-access-gglkw\") pod \"kube-storage-version-migrator-operator-b67b599dd-j9lwf\" (UID: \"152fa2df-4029-4e25-aa4e-98005cac6226\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817346 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817390 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kljfj\" (UniqueName: \"kubernetes.io/projected/039a9234-6e8f-4854-b694-478409aacf35-kube-api-access-kljfj\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817426 4776 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817474 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmvqp\" (UniqueName: \"kubernetes.io/projected/6c7a556d-56ac-4392-8f4e-c2c824ab5e90-kube-api-access-hmvqp\") pod \"machine-config-operator-74547568cd-bqsqh\" (UID: \"6c7a556d-56ac-4392-8f4e-c2c824ab5e90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817561 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-default-certificate\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817597 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/33ba45e2-c084-450f-822d-2326de41ad36-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-dfrfs\" (UID: \"33ba45e2-c084-450f-822d-2326de41ad36\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817613 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/46a581f4-765e-458d-866b-fe1803351969-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-49rr2\" (UID: \"46a581f4-765e-458d-866b-fe1803351969\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817670 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817717 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/53042356-fe0e-4cd4-ba21-301a7040c408-metrics-tls\") pod \"ingress-operator-5b745b69d9-k4vpf\" (UID: \"53042356-fe0e-4cd4-ba21-301a7040c408\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817758 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/53042356-fe0e-4cd4-ba21-301a7040c408-bound-sa-token\") pod \"ingress-operator-5b745b69d9-k4vpf\" (UID: \"53042356-fe0e-4cd4-ba21-301a7040c408\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817811 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p46dc\" (UniqueName: 
\"kubernetes.io/projected/53042356-fe0e-4cd4-ba21-301a7040c408-kube-api-access-p46dc\") pod \"ingress-operator-5b745b69d9-k4vpf\" (UID: \"53042356-fe0e-4cd4-ba21-301a7040c408\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817845 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817879 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vk2t\" (UniqueName: \"kubernetes.io/projected/5991ed3f-a5bf-4586-918a-b79afd2d8c27-kube-api-access-5vk2t\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817913 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16630e9b-9226-44d4-86e9-48fda67033fe-serving-cert\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817958 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/039a9234-6e8f-4854-b694-478409aacf35-trusted-ca-bundle\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817965 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1353bef9-e9b7-4242-afc2-1ff34d4da935-config\") pod \"console-operator-58897d9998-894r8\" (UID: \"1353bef9-e9b7-4242-afc2-1ff34d4da935\") " pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817990 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-metrics-certs\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818133 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f0d8de85-33c3-4bd0-9589-f140ba0b77c4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-dvh8z\" (UID: \"f0d8de85-33c3-4bd0-9589-f140ba0b77c4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818081 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/de3063f6-acdd-4c62-9d94-d5dd82b0fda1-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-m657q\" (UID: 
\"de3063f6-acdd-4c62-9d94-d5dd82b0fda1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818285 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/152fa2df-4029-4e25-aa4e-98005cac6226-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-j9lwf\" (UID: \"152fa2df-4029-4e25-aa4e-98005cac6226\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818317 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9h2v4\" (UniqueName: \"kubernetes.io/projected/52f1c1ca-0b7a-4d48-b967-1d4525e220e1-kube-api-access-9h2v4\") pod \"catalog-operator-68c6474976-2qjhm\" (UID: \"52f1c1ca-0b7a-4d48-b967-1d4525e220e1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818369 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6c7a556d-56ac-4392-8f4e-c2c824ab5e90-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bqsqh\" (UID: \"6c7a556d-56ac-4392-8f4e-c2c824ab5e90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818397 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-service-ca-bundle\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818459 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/039a9234-6e8f-4854-b694-478409aacf35-service-ca\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818497 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6c7a556d-56ac-4392-8f4e-c2c824ab5e90-images\") pod \"machine-config-operator-74547568cd-bqsqh\" (UID: \"6c7a556d-56ac-4392-8f4e-c2c824ab5e90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818524 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16630e9b-9226-44d4-86e9-48fda67033fe-config\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818528 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 
19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818546 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/33ba45e2-c084-450f-822d-2326de41ad36-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-dfrfs\" (UID: \"33ba45e2-c084-450f-822d-2326de41ad36\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818572 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a01b7ee4-02a1-4b53-81bf-7367b7e025a5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nvrdm\" (UID: \"a01b7ee4-02a1-4b53-81bf-7367b7e025a5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818608 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/52f1c1ca-0b7a-4d48-b967-1d4525e220e1-srv-cert\") pod \"catalog-operator-68c6474976-2qjhm\" (UID: \"52f1c1ca-0b7a-4d48-b967-1d4525e220e1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.818640 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c9b30cf2-d2a9-4e2c-896d-60a54109e9ee-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-bktbd\" (UID: \"c9b30cf2-d2a9-4e2c-896d-60a54109e9ee\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.817114 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16630e9b-9226-44d4-86e9-48fda67033fe-service-ca-bundle\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.819290 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1353bef9-e9b7-4242-afc2-1ff34d4da935-trusted-ca\") pod \"console-operator-58897d9998-894r8\" (UID: \"1353bef9-e9b7-4242-afc2-1ff34d4da935\") " pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.819736 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a01b7ee4-02a1-4b53-81bf-7367b7e025a5-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nvrdm\" (UID: \"a01b7ee4-02a1-4b53-81bf-7367b7e025a5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.820381 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4aa9ef2-3d89-409d-97a7-3990f846504a-config\") pod \"kube-apiserver-operator-766d6c64bb-sp9rh\" (UID: \"c4aa9ef2-3d89-409d-97a7-3990f846504a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.821016 4776 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6c7a556d-56ac-4392-8f4e-c2c824ab5e90-auth-proxy-config\") pod \"machine-config-operator-74547568cd-bqsqh\" (UID: \"6c7a556d-56ac-4392-8f4e-c2c824ab5e90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.821546 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.821658 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-metrics-certs\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.822246 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16630e9b-9226-44d4-86e9-48fda67033fe-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.822270 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62831e09-6067-4270-be11-d251956dffa2-config\") pod \"openshift-apiserver-operator-796bbdcf4f-f4dn6\" (UID: \"62831e09-6067-4270-be11-d251956dffa2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.822477 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-audit-policies\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.814735 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5991ed3f-a5bf-4586-918a-b79afd2d8c27-audit-dir\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.822888 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/53042356-fe0e-4cd4-ba21-301a7040c408-trusted-ca\") pod \"ingress-operator-5b745b69d9-k4vpf\" (UID: \"53042356-fe0e-4cd4-ba21-301a7040c408\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.823622 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-service-ca-bundle\") pod \"router-default-5444994796-b9hnh\" (UID: 
\"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.823846 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.824563 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.824614 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16630e9b-9226-44d4-86e9-48fda67033fe-config\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.824747 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.825934 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.826023 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/62831e09-6067-4270-be11-d251956dffa2-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-f4dn6\" (UID: \"62831e09-6067-4270-be11-d251956dffa2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.825979 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.826159 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/53042356-fe0e-4cd4-ba21-301a7040c408-metrics-tls\") pod \"ingress-operator-5b745b69d9-k4vpf\" (UID: \"53042356-fe0e-4cd4-ba21-301a7040c408\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 
19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.826527 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/c9b30cf2-d2a9-4e2c-896d-60a54109e9ee-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-bktbd\" (UID: \"c9b30cf2-d2a9-4e2c-896d-60a54109e9ee\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.826629 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.826668 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.830411 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4aa9ef2-3d89-409d-97a7-3990f846504a-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-sp9rh\" (UID: \"c4aa9ef2-3d89-409d-97a7-3990f846504a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.830601 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/33ba45e2-c084-450f-822d-2326de41ad36-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-dfrfs\" (UID: \"33ba45e2-c084-450f-822d-2326de41ad36\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.830737 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-p4pr2\" (UID: \"e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p4pr2" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.830759 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.830846 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.831192 4776 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1353bef9-e9b7-4242-afc2-1ff34d4da935-serving-cert\") pod \"console-operator-58897d9998-894r8\" (UID: \"1353bef9-e9b7-4242-afc2-1ff34d4da935\") " pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.831386 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/16630e9b-9226-44d4-86e9-48fda67033fe-serving-cert\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.831529 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-default-certificate\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.831735 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.850588 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.878518 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.886263 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/039a9234-6e8f-4854-b694-478409aacf35-trusted-ca-bundle\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.889999 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.895322 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/039a9234-6e8f-4854-b694-478409aacf35-oauth-serving-cert\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.910046 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.917892 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/039a9234-6e8f-4854-b694-478409aacf35-console-config\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.930504 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.937231 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/039a9234-6e8f-4854-b694-478409aacf35-console-serving-cert\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.949596 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.960476 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/039a9234-6e8f-4854-b694-478409aacf35-service-ca\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.969531 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.979495 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/039a9234-6e8f-4854-b694-478409aacf35-console-oauth-config\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:18 crc kubenswrapper[4776]: I0127 19:12:18.990113 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.009733 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.029715 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.036189 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/256c7026-dc32-4fd8-9250-608d1b5de8bf-metrics-tls\") pod \"dns-operator-744455d44c-7pq2b\" (UID: \"256c7026-dc32-4fd8-9250-608d1b5de8bf\") " pod="openshift-dns-operator/dns-operator-744455d44c-7pq2b" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.049549 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.069493 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.090238 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.111024 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.129681 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.150123 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.169535 4776 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.190292 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.196533 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46a581f4-765e-458d-866b-fe1803351969-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-49rr2\" (UID: \"46a581f4-765e-458d-866b-fe1803351969\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.211003 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.220729 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46a581f4-765e-458d-866b-fe1803351969-config\") pod \"kube-controller-manager-operator-78b949d7b-49rr2\" (UID: \"46a581f4-765e-458d-866b-fe1803351969\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.229326 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.249755 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.270209 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.283344 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/152fa2df-4029-4e25-aa4e-98005cac6226-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-j9lwf\" (UID: \"152fa2df-4029-4e25-aa4e-98005cac6226\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.290793 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.310181 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.317107 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/152fa2df-4029-4e25-aa4e-98005cac6226-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-j9lwf\" (UID: \"152fa2df-4029-4e25-aa4e-98005cac6226\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.330524 4776 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.350282 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.370352 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.380496 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6c7a556d-56ac-4392-8f4e-c2c824ab5e90-proxy-tls\") pod \"machine-config-operator-74547568cd-bqsqh\" (UID: \"6c7a556d-56ac-4392-8f4e-c2c824ab5e90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.390520 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.410583 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.415000 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6c7a556d-56ac-4392-8f4e-c2c824ab5e90-images\") pod \"machine-config-operator-74547568cd-bqsqh\" (UID: \"6c7a556d-56ac-4392-8f4e-c2c824ab5e90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.430117 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.445753 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/de3063f6-acdd-4c62-9d94-d5dd82b0fda1-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-m657q\" (UID: \"de3063f6-acdd-4c62-9d94-d5dd82b0fda1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.449276 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.470082 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.489828 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.510542 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.519592 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/52f1c1ca-0b7a-4d48-b967-1d4525e220e1-profile-collector-cert\") pod \"catalog-operator-68c6474976-2qjhm\" (UID: \"52f1c1ca-0b7a-4d48-b967-1d4525e220e1\") " 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.530216 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.535996 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/52f1c1ca-0b7a-4d48-b967-1d4525e220e1-srv-cert\") pod \"catalog-operator-68c6474976-2qjhm\" (UID: \"52f1c1ca-0b7a-4d48-b967-1d4525e220e1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.569281 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.589865 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.609108 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.628035 4776 request.go:700] Waited for 1.002380191s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-service-ca-operator/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0 Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.629835 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.649240 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.669636 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.690051 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.708988 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.730684 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.750752 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.769865 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.790503 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.813260 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.829906 
4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.849993 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.870008 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.889461 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.909952 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.930858 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.950802 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.971139 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 27 19:12:19 crc kubenswrapper[4776]: I0127 19:12:19.989641 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.009552 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.029218 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.050191 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.076376 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.089660 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.110683 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.129656 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.172839 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.189785 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.209999 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.230258 4776 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.251339 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.270955 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.290189 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.310275 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.329539 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.350184 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.388846 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5jts\" (UniqueName: \"kubernetes.io/projected/dafb70f2-b919-42f3-9f40-9f6183db8369-kube-api-access-x5jts\") pod \"apiserver-76f77b778f-9xx7d\" (UID: \"dafb70f2-b919-42f3-9f40-9f6183db8369\") " pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.404552 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d44v\" (UniqueName: \"kubernetes.io/projected/61b154b9-1f8d-461c-afa3-0c5204d5084f-kube-api-access-4d44v\") pod \"apiserver-7bbb656c7d-5k2fv\" (UID: \"61b154b9-1f8d-461c-afa3-0c5204d5084f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.427241 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr4ws\" (UniqueName: \"kubernetes.io/projected/c944923b-00f1-4f4f-a883-aeb69d86c343-kube-api-access-sr4ws\") pod \"controller-manager-879f6c89f-rnpjf\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.450310 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4tqk\" (UniqueName: \"kubernetes.io/projected/db05b4c4-16ee-4a32-ac8c-eefda506c6db-kube-api-access-h4tqk\") pod \"machine-api-operator-5694c8668f-tzpjt\" (UID: \"db05b4c4-16ee-4a32-ac8c-eefda506c6db\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.469973 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvjjz\" (UniqueName: \"kubernetes.io/projected/68c010e7-6718-48f9-b901-f6047901b8b2-kube-api-access-dvjjz\") pod \"openshift-config-operator-7777fb866f-p5zwn\" (UID: \"68c010e7-6718-48f9-b901-f6047901b8b2\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.490619 4776 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 27 19:12:20 crc 
kubenswrapper[4776]: I0127 19:12:20.512491 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.530406 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.550132 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.565806 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmwm9\" (UniqueName: \"kubernetes.io/projected/dfecd87d-0e25-4116-ae11-72e607a011dd-kube-api-access-pmwm9\") pod \"route-controller-manager-6576b87f9c-cxv6m\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.569697 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.589870 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.610162 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.610357 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.620438 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.628422 4776 request.go:700] Waited for 1.858308024s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dnode-bootstrapper-token&limit=500&resourceVersion=0 Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.631237 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.642630 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.642898 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.651365 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.668792 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.693470 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.695570 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhncd\" (UniqueName: \"kubernetes.io/projected/e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9-kube-api-access-lhncd\") pod \"multus-admission-controller-857f4d67dd-p4pr2\" (UID: \"e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-p4pr2" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.705349 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/33ba45e2-c084-450f-822d-2326de41ad36-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-dfrfs\" (UID: \"33ba45e2-c084-450f-822d-2326de41ad36\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.728660 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgmtm\" (UniqueName: \"kubernetes.io/projected/de3063f6-acdd-4c62-9d94-d5dd82b0fda1-kube-api-access-cgmtm\") pod \"package-server-manager-789f6589d5-m657q\" (UID: \"de3063f6-acdd-4c62-9d94-d5dd82b0fda1\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.750357 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6lbg\" (UniqueName: \"kubernetes.io/projected/33ba45e2-c084-450f-822d-2326de41ad36-kube-api-access-x6lbg\") pod \"cluster-image-registry-operator-dc59b4c8b-dfrfs\" (UID: \"33ba45e2-c084-450f-822d-2326de41ad36\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.759388 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.761923 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbf2p\" (UniqueName: \"kubernetes.io/projected/62831e09-6067-4270-be11-d251956dffa2-kube-api-access-hbf2p\") pod \"openshift-apiserver-operator-796bbdcf4f-f4dn6\" (UID: \"62831e09-6067-4270-be11-d251956dffa2\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.795461 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt6gt\" (UniqueName: \"kubernetes.io/projected/16630e9b-9226-44d4-86e9-48fda67033fe-kube-api-access-kt6gt\") pod \"authentication-operator-69f744f599-n8wxr\" (UID: \"16630e9b-9226-44d4-86e9-48fda67033fe\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.800251 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.805602 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdpdf\" (UniqueName: \"kubernetes.io/projected/d2485845-4359-4e05-9523-8e5e4a40d827-kube-api-access-kdpdf\") pod \"migrator-59844c95c7-wkkxn\" (UID: \"d2485845-4359-4e05-9523-8e5e4a40d827\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkkxn" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.826169 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hng2h\" (UniqueName: \"kubernetes.io/projected/9e2f4f0c-a78b-40dc-855f-a411b72183c5-kube-api-access-hng2h\") pod \"downloads-7954f5f757-cdd4p\" (UID: \"9e2f4f0c-a78b-40dc-855f-a411b72183c5\") " pod="openshift-console/downloads-7954f5f757-cdd4p" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.845210 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gr8gz\" (UniqueName: \"kubernetes.io/projected/1353bef9-e9b7-4242-afc2-1ff34d4da935-kube-api-access-gr8gz\") pod \"console-operator-58897d9998-894r8\" (UID: \"1353bef9-e9b7-4242-afc2-1ff34d4da935\") " pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.851401 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkkxn" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.869389 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-p4pr2" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.875280 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9h2v4\" (UniqueName: \"kubernetes.io/projected/52f1c1ca-0b7a-4d48-b967-1d4525e220e1-kube-api-access-9h2v4\") pod \"catalog-operator-68c6474976-2qjhm\" (UID: \"52f1c1ca-0b7a-4d48-b967-1d4525e220e1\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.884685 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-cdd4p" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.956024 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gglkw\" (UniqueName: \"kubernetes.io/projected/152fa2df-4029-4e25-aa4e-98005cac6226-kube-api-access-gglkw\") pod \"kube-storage-version-migrator-operator-b67b599dd-j9lwf\" (UID: \"152fa2df-4029-4e25-aa4e-98005cac6226\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.972973 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwbkd\" (UniqueName: \"kubernetes.io/projected/c9b30cf2-d2a9-4e2c-896d-60a54109e9ee-kube-api-access-jwbkd\") pod \"cluster-samples-operator-665b6dd947-bktbd\" (UID: \"c9b30cf2-d2a9-4e2c-896d-60a54109e9ee\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.974226 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kljfj\" (UniqueName: \"kubernetes.io/projected/039a9234-6e8f-4854-b694-478409aacf35-kube-api-access-kljfj\") pod \"console-f9d7485db-vnp9r\" (UID: \"039a9234-6e8f-4854-b694-478409aacf35\") " pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.975713 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmvqp\" (UniqueName: \"kubernetes.io/projected/6c7a556d-56ac-4392-8f4e-c2c824ab5e90-kube-api-access-hmvqp\") pod \"machine-config-operator-74547568cd-bqsqh\" (UID: \"6c7a556d-56ac-4392-8f4e-c2c824ab5e90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.976220 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.989419 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4aa9ef2-3d89-409d-97a7-3990f846504a-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-sp9rh\" (UID: \"c4aa9ef2-3d89-409d-97a7-3990f846504a\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" Jan 27 19:12:20 crc kubenswrapper[4776]: I0127 19:12:20.996246 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:20.999915 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.005158 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qllm\" (UniqueName: \"kubernetes.io/projected/f0d8de85-33c3-4bd0-9589-f140ba0b77c4-kube-api-access-4qllm\") pod \"openshift-controller-manager-operator-756b6f6bc6-dvh8z\" (UID: \"f0d8de85-33c3-4bd0-9589-f140ba0b77c4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.010002 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.010785 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.016025 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.021309 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/53042356-fe0e-4cd4-ba21-301a7040c408-bound-sa-token\") pod \"ingress-operator-5b745b69d9-k4vpf\" (UID: \"53042356-fe0e-4cd4-ba21-301a7040c408\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.038781 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.043369 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsjnw\" (UniqueName: \"kubernetes.io/projected/256c7026-dc32-4fd8-9250-608d1b5de8bf-kube-api-access-jsjnw\") pod \"dns-operator-744455d44c-7pq2b\" (UID: \"256c7026-dc32-4fd8-9250-608d1b5de8bf\") " pod="openshift-dns-operator/dns-operator-744455d44c-7pq2b" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.043755 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m"] Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.048914 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-9xx7d"] Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.053585 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnv7c\" (UniqueName: \"kubernetes.io/projected/fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a-kube-api-access-tnv7c\") pod \"router-default-5444994796-b9hnh\" (UID: \"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a\") " pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.067048 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p46dc\" (UniqueName: \"kubernetes.io/projected/53042356-fe0e-4cd4-ba21-301a7040c408-kube-api-access-p46dc\") pod \"ingress-operator-5b745b69d9-k4vpf\" (UID: \"53042356-fe0e-4cd4-ba21-301a7040c408\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.072118 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.073924 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv"] Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.083083 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.088653 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a01b7ee4-02a1-4b53-81bf-7367b7e025a5-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-nvrdm\" (UID: \"a01b7ee4-02a1-4b53-81bf-7367b7e025a5\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.106740 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/46a581f4-765e-458d-866b-fe1803351969-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-49rr2\" (UID: \"46a581f4-765e-458d-866b-fe1803351969\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.119088 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.140597 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vk2t\" (UniqueName: \"kubernetes.io/projected/5991ed3f-a5bf-4586-918a-b79afd2d8c27-kube-api-access-5vk2t\") pod \"oauth-openshift-558db77b4-dghnj\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:21 crc kubenswrapper[4776]: W0127 19:12:21.145677 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddafb70f2_b919_42f3_9f40_9f6183db8369.slice/crio-ff35ce1eadbd272a8d982d407070b80d0b7fb6f47959cd16e6df4fd0bf3863f3 WatchSource:0}: Error finding container ff35ce1eadbd272a8d982d407070b80d0b7fb6f47959cd16e6df4fd0bf3863f3: Status 404 returned error can't find the container with id ff35ce1eadbd272a8d982d407070b80d0b7fb6f47959cd16e6df4fd0bf3863f3 Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.154750 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-registry-tls\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.154824 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtnjp\" (UniqueName: \"kubernetes.io/projected/59235ec5-26bb-4966-a543-db238811abe6-kube-api-access-xtnjp\") pod \"machine-config-controller-84d6567774-zsvdf\" (UID: \"59235ec5-26bb-4966-a543-db238811abe6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.154854 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-bound-sa-token\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 
19:12:21.154905 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.154925 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ecca5f11-1043-4e40-b554-8a63ecc36423-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.154994 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ecca5f11-1043-4e40-b554-8a63ecc36423-trusted-ca\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.155033 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ecca5f11-1043-4e40-b554-8a63ecc36423-registry-certificates\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.155112 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv2kp\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-kube-api-access-rv2kp\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.155144 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/59235ec5-26bb-4966-a543-db238811abe6-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-zsvdf\" (UID: \"59235ec5-26bb-4966-a543-db238811abe6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.155165 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/59235ec5-26bb-4966-a543-db238811abe6-proxy-tls\") pod \"machine-config-controller-84d6567774-zsvdf\" (UID: \"59235ec5-26bb-4966-a543-db238811abe6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.155203 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ecca5f11-1043-4e40-b554-8a63ecc36423-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc 
kubenswrapper[4776]: E0127 19:12:21.155659 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:21.655629812 +0000 UTC m=+144.161234959 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.159288 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.176140 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.197979 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.214543 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-7pq2b" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.259343 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.259793 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/c03dcab7-3d8d-424a-9e6c-ce689f5cb077-signing-key\") pod \"service-ca-9c57cc56f-fkb4r\" (UID: \"c03dcab7-3d8d-424a-9e6c-ce689f5cb077\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.259821 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f1e5fa8d-e0b4-4e13-8954-706f606b84fa-certs\") pod \"machine-config-server-pz2qz\" (UID: \"f1e5fa8d-e0b4-4e13-8954-706f606b84fa\") " pod="openshift-machine-config-operator/machine-config-server-pz2qz" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.259857 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rpq2\" (UniqueName: \"kubernetes.io/projected/921a2ca9-0cb3-401a-a978-a70a63d9418d-kube-api-access-6rpq2\") pod \"dns-default-zszr6\" (UID: \"921a2ca9-0cb3-401a-a978-a70a63d9418d\") " pod="openshift-dns/dns-default-zszr6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.259878 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/254e5b81-f470-4b03-a703-bc60a100d4da-etcd-service-ca\") pod \"etcd-operator-b45778765-r869q\" (UID: 
\"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.259926 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/254e5b81-f470-4b03-a703-bc60a100d4da-etcd-client\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.259973 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-registry-tls\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.259994 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/245499a3-8fea-446c-8a3e-daf9df73bf15-config-volume\") pod \"collect-profiles-29492340-zdtq6\" (UID: \"245499a3-8fea-446c-8a3e-daf9df73bf15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260016 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-mountpoint-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260063 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtnjp\" (UniqueName: \"kubernetes.io/projected/59235ec5-26bb-4966-a543-db238811abe6-kube-api-access-xtnjp\") pod \"machine-config-controller-84d6567774-zsvdf\" (UID: \"59235ec5-26bb-4966-a543-db238811abe6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260083 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-plugins-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260166 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-bound-sa-token\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260212 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/921a2ca9-0cb3-401a-a978-a70a63d9418d-config-volume\") pod \"dns-default-zszr6\" (UID: \"921a2ca9-0cb3-401a-a978-a70a63d9418d\") " pod="openshift-dns/dns-default-zszr6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260233 4776 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca924069-e7d6-4520-94d9-98f18ed4002e-config\") pod \"machine-approver-56656f9798-xspbt\" (UID: \"ca924069-e7d6-4520-94d9-98f18ed4002e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260253 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8v9n\" (UniqueName: \"kubernetes.io/projected/254e5b81-f470-4b03-a703-bc60a100d4da-kube-api-access-l8v9n\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260272 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9d8s8\" (UID: \"8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260358 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ecca5f11-1043-4e40-b554-8a63ecc36423-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260379 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b05a7129-f82d-43ee-ac7e-9c2d1122a4d7-webhook-cert\") pod \"packageserver-d55dfcdfc-gf6ml\" (UID: \"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260467 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-socket-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260524 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-registration-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260586 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ecca5f11-1043-4e40-b554-8a63ecc36423-trusted-ca\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260605 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: 
\"kubernetes.io/configmap/c03dcab7-3d8d-424a-9e6c-ce689f5cb077-signing-cabundle\") pod \"service-ca-9c57cc56f-fkb4r\" (UID: \"c03dcab7-3d8d-424a-9e6c-ce689f5cb077\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260625 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6e7febfa-69fa-429e-85be-9d6d54d18760-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tqlbs\" (UID: \"6e7febfa-69fa-429e-85be-9d6d54d18760\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260673 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ecca5f11-1043-4e40-b554-8a63ecc36423-registry-certificates\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260694 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqn8b\" (UniqueName: \"kubernetes.io/projected/8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e-kube-api-access-zqn8b\") pod \"olm-operator-6b444d44fb-9d8s8\" (UID: \"8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260736 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e-srv-cert\") pod \"olm-operator-6b444d44fb-9d8s8\" (UID: \"8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260754 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-csi-data-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260798 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5clqb\" (UniqueName: \"kubernetes.io/projected/986fcda6-5c06-431b-bba6-d108dbeb3d9f-kube-api-access-5clqb\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260822 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f1e5fa8d-e0b4-4e13-8954-706f606b84fa-node-bootstrap-token\") pod \"machine-config-server-pz2qz\" (UID: \"f1e5fa8d-e0b4-4e13-8954-706f606b84fa\") " pod="openshift-machine-config-operator/machine-config-server-pz2qz" Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.260845 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-27 19:12:21.760825686 +0000 UTC m=+144.266430833 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260921 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/254e5b81-f470-4b03-a703-bc60a100d4da-config\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260964 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plkp7\" (UniqueName: \"kubernetes.io/projected/245499a3-8fea-446c-8a3e-daf9df73bf15-kube-api-access-plkp7\") pod \"collect-profiles-29492340-zdtq6\" (UID: \"245499a3-8fea-446c-8a3e-daf9df73bf15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.260986 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/62e0f316-4ab0-4e56-9874-3008b20d930e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rfx55\" (UID: \"62e0f316-4ab0-4e56-9874-3008b20d930e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.261004 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlh5j\" (UniqueName: \"kubernetes.io/projected/c03dcab7-3d8d-424a-9e6c-ce689f5cb077-kube-api-access-wlh5j\") pod \"service-ca-9c57cc56f-fkb4r\" (UID: \"c03dcab7-3d8d-424a-9e6c-ce689f5cb077\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.261056 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/245499a3-8fea-446c-8a3e-daf9df73bf15-secret-volume\") pod \"collect-profiles-29492340-zdtq6\" (UID: \"245499a3-8fea-446c-8a3e-daf9df73bf15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.261128 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmqj6\" (UniqueName: \"kubernetes.io/projected/6e7febfa-69fa-429e-85be-9d6d54d18760-kube-api-access-gmqj6\") pod \"marketplace-operator-79b997595-tqlbs\" (UID: \"6e7febfa-69fa-429e-85be-9d6d54d18760\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.261167 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/254e5b81-f470-4b03-a703-bc60a100d4da-etcd-ca\") pod \"etcd-operator-b45778765-r869q\" 
(UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.261194 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26eec851-c48a-47af-92a9-3fd33f385efe-config\") pod \"service-ca-operator-777779d784-rqtqp\" (UID: \"26eec851-c48a-47af-92a9-3fd33f385efe\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.261214 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26eec851-c48a-47af-92a9-3fd33f385efe-serving-cert\") pod \"service-ca-operator-777779d784-rqtqp\" (UID: \"26eec851-c48a-47af-92a9-3fd33f385efe\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.261251 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ca924069-e7d6-4520-94d9-98f18ed4002e-auth-proxy-config\") pod \"machine-approver-56656f9798-xspbt\" (UID: \"ca924069-e7d6-4520-94d9-98f18ed4002e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.261275 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/254e5b81-f470-4b03-a703-bc60a100d4da-serving-cert\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.261330 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e117e37f-f455-4bfc-b859-b577cc25f4d8-cert\") pod \"ingress-canary-rkkzb\" (UID: \"e117e37f-f455-4bfc-b859-b577cc25f4d8\") " pod="openshift-ingress-canary/ingress-canary-rkkzb" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.261374 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frc9g\" (UniqueName: \"kubernetes.io/projected/f1e5fa8d-e0b4-4e13-8954-706f606b84fa-kube-api-access-frc9g\") pod \"machine-config-server-pz2qz\" (UID: \"f1e5fa8d-e0b4-4e13-8954-706f606b84fa\") " pod="openshift-machine-config-operator/machine-config-server-pz2qz" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.261404 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/921a2ca9-0cb3-401a-a978-a70a63d9418d-metrics-tls\") pod \"dns-default-zszr6\" (UID: \"921a2ca9-0cb3-401a-a978-a70a63d9418d\") " pod="openshift-dns/dns-default-zszr6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.265070 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ecca5f11-1043-4e40-b554-8a63ecc36423-registry-certificates\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.267375 4776 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.267926 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv2kp\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-kube-api-access-rv2kp\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.267967 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b05a7129-f82d-43ee-ac7e-9c2d1122a4d7-tmpfs\") pod \"packageserver-d55dfcdfc-gf6ml\" (UID: \"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.267989 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b05a7129-f82d-43ee-ac7e-9c2d1122a4d7-apiservice-cert\") pod \"packageserver-d55dfcdfc-gf6ml\" (UID: \"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.268973 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxlwm\" (UniqueName: \"kubernetes.io/projected/b05a7129-f82d-43ee-ac7e-9c2d1122a4d7-kube-api-access-kxlwm\") pod \"packageserver-d55dfcdfc-gf6ml\" (UID: \"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.269426 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2qgp\" (UniqueName: \"kubernetes.io/projected/e117e37f-f455-4bfc-b859-b577cc25f4d8-kube-api-access-m2qgp\") pod \"ingress-canary-rkkzb\" (UID: \"e117e37f-f455-4bfc-b859-b577cc25f4d8\") " pod="openshift-ingress-canary/ingress-canary-rkkzb" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.269497 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ca924069-e7d6-4520-94d9-98f18ed4002e-machine-approver-tls\") pod \"machine-approver-56656f9798-xspbt\" (UID: \"ca924069-e7d6-4520-94d9-98f18ed4002e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.269618 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgdhj\" (UniqueName: \"kubernetes.io/projected/62e0f316-4ab0-4e56-9874-3008b20d930e-kube-api-access-pgdhj\") pod \"control-plane-machine-set-operator-78cbb6b69f-rfx55\" (UID: \"62e0f316-4ab0-4e56-9874-3008b20d930e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.269745 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ecca5f11-1043-4e40-b554-8a63ecc36423-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zh827\" (UID: 
\"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.269928 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/59235ec5-26bb-4966-a543-db238811abe6-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-zsvdf\" (UID: \"59235ec5-26bb-4966-a543-db238811abe6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.269971 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/59235ec5-26bb-4966-a543-db238811abe6-proxy-tls\") pod \"machine-config-controller-84d6567774-zsvdf\" (UID: \"59235ec5-26bb-4966-a543-db238811abe6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.270791 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkjqn\" (UniqueName: \"kubernetes.io/projected/ca924069-e7d6-4520-94d9-98f18ed4002e-kube-api-access-xkjqn\") pod \"machine-approver-56656f9798-xspbt\" (UID: \"ca924069-e7d6-4520-94d9-98f18ed4002e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.270863 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/59235ec5-26bb-4966-a543-db238811abe6-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-zsvdf\" (UID: \"59235ec5-26bb-4966-a543-db238811abe6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.270893 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ecca5f11-1043-4e40-b554-8a63ecc36423-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.270936 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7zbq\" (UniqueName: \"kubernetes.io/projected/26eec851-c48a-47af-92a9-3fd33f385efe-kube-api-access-s7zbq\") pod \"service-ca-operator-777779d784-rqtqp\" (UID: \"26eec851-c48a-47af-92a9-3fd33f385efe\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.272879 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6e7febfa-69fa-429e-85be-9d6d54d18760-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tqlbs\" (UID: \"6e7febfa-69fa-429e-85be-9d6d54d18760\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.274049 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-registry-tls\") pod \"image-registry-697d97f7c8-zh827\" (UID: 
\"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.274502 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ecca5f11-1043-4e40-b554-8a63ecc36423-trusted-ca\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.275137 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/59235ec5-26bb-4966-a543-db238811abe6-proxy-tls\") pod \"machine-config-controller-84d6567774-zsvdf\" (UID: \"59235ec5-26bb-4966-a543-db238811abe6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.283909 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ecca5f11-1043-4e40-b554-8a63ecc36423-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.289308 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-bound-sa-token\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.329553 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtnjp\" (UniqueName: \"kubernetes.io/projected/59235ec5-26bb-4966-a543-db238811abe6-kube-api-access-xtnjp\") pod \"machine-config-controller-84d6567774-zsvdf\" (UID: \"59235ec5-26bb-4966-a543-db238811abe6\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.355449 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv2kp\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-kube-api-access-rv2kp\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374705 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/921a2ca9-0cb3-401a-a978-a70a63d9418d-config-volume\") pod \"dns-default-zszr6\" (UID: \"921a2ca9-0cb3-401a-a978-a70a63d9418d\") " pod="openshift-dns/dns-default-zszr6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374741 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca924069-e7d6-4520-94d9-98f18ed4002e-config\") pod \"machine-approver-56656f9798-xspbt\" (UID: \"ca924069-e7d6-4520-94d9-98f18ed4002e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374762 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8v9n\" 
(UniqueName: \"kubernetes.io/projected/254e5b81-f470-4b03-a703-bc60a100d4da-kube-api-access-l8v9n\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374799 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9d8s8\" (UID: \"8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374823 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374842 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b05a7129-f82d-43ee-ac7e-9c2d1122a4d7-webhook-cert\") pod \"packageserver-d55dfcdfc-gf6ml\" (UID: \"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374873 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-socket-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374889 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-registration-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374905 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/c03dcab7-3d8d-424a-9e6c-ce689f5cb077-signing-cabundle\") pod \"service-ca-9c57cc56f-fkb4r\" (UID: \"c03dcab7-3d8d-424a-9e6c-ce689f5cb077\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374920 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6e7febfa-69fa-429e-85be-9d6d54d18760-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tqlbs\" (UID: \"6e7febfa-69fa-429e-85be-9d6d54d18760\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374935 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqn8b\" (UniqueName: \"kubernetes.io/projected/8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e-kube-api-access-zqn8b\") pod \"olm-operator-6b444d44fb-9d8s8\" (UID: \"8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374951 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e-srv-cert\") pod \"olm-operator-6b444d44fb-9d8s8\" (UID: \"8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374967 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-csi-data-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374981 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5clqb\" (UniqueName: \"kubernetes.io/projected/986fcda6-5c06-431b-bba6-d108dbeb3d9f-kube-api-access-5clqb\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.374996 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f1e5fa8d-e0b4-4e13-8954-706f606b84fa-node-bootstrap-token\") pod \"machine-config-server-pz2qz\" (UID: \"f1e5fa8d-e0b4-4e13-8954-706f606b84fa\") " pod="openshift-machine-config-operator/machine-config-server-pz2qz" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375014 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/254e5b81-f470-4b03-a703-bc60a100d4da-config\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375031 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plkp7\" (UniqueName: \"kubernetes.io/projected/245499a3-8fea-446c-8a3e-daf9df73bf15-kube-api-access-plkp7\") pod \"collect-profiles-29492340-zdtq6\" (UID: \"245499a3-8fea-446c-8a3e-daf9df73bf15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375047 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/62e0f316-4ab0-4e56-9874-3008b20d930e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rfx55\" (UID: \"62e0f316-4ab0-4e56-9874-3008b20d930e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375065 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlh5j\" (UniqueName: \"kubernetes.io/projected/c03dcab7-3d8d-424a-9e6c-ce689f5cb077-kube-api-access-wlh5j\") pod \"service-ca-9c57cc56f-fkb4r\" (UID: \"c03dcab7-3d8d-424a-9e6c-ce689f5cb077\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375087 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/245499a3-8fea-446c-8a3e-daf9df73bf15-secret-volume\") pod \"collect-profiles-29492340-zdtq6\" (UID: \"245499a3-8fea-446c-8a3e-daf9df73bf15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375112 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmqj6\" (UniqueName: \"kubernetes.io/projected/6e7febfa-69fa-429e-85be-9d6d54d18760-kube-api-access-gmqj6\") pod \"marketplace-operator-79b997595-tqlbs\" (UID: \"6e7febfa-69fa-429e-85be-9d6d54d18760\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375132 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/254e5b81-f470-4b03-a703-bc60a100d4da-etcd-ca\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375148 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26eec851-c48a-47af-92a9-3fd33f385efe-config\") pod \"service-ca-operator-777779d784-rqtqp\" (UID: \"26eec851-c48a-47af-92a9-3fd33f385efe\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375168 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26eec851-c48a-47af-92a9-3fd33f385efe-serving-cert\") pod \"service-ca-operator-777779d784-rqtqp\" (UID: \"26eec851-c48a-47af-92a9-3fd33f385efe\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375187 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ca924069-e7d6-4520-94d9-98f18ed4002e-auth-proxy-config\") pod \"machine-approver-56656f9798-xspbt\" (UID: \"ca924069-e7d6-4520-94d9-98f18ed4002e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375208 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/254e5b81-f470-4b03-a703-bc60a100d4da-serving-cert\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375232 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e117e37f-f455-4bfc-b859-b577cc25f4d8-cert\") pod \"ingress-canary-rkkzb\" (UID: \"e117e37f-f455-4bfc-b859-b577cc25f4d8\") " pod="openshift-ingress-canary/ingress-canary-rkkzb" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375248 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frc9g\" (UniqueName: \"kubernetes.io/projected/f1e5fa8d-e0b4-4e13-8954-706f606b84fa-kube-api-access-frc9g\") pod \"machine-config-server-pz2qz\" (UID: \"f1e5fa8d-e0b4-4e13-8954-706f606b84fa\") " pod="openshift-machine-config-operator/machine-config-server-pz2qz" Jan 27 
19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375289 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/921a2ca9-0cb3-401a-a978-a70a63d9418d-metrics-tls\") pod \"dns-default-zszr6\" (UID: \"921a2ca9-0cb3-401a-a978-a70a63d9418d\") " pod="openshift-dns/dns-default-zszr6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375315 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b05a7129-f82d-43ee-ac7e-9c2d1122a4d7-tmpfs\") pod \"packageserver-d55dfcdfc-gf6ml\" (UID: \"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375332 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b05a7129-f82d-43ee-ac7e-9c2d1122a4d7-apiservice-cert\") pod \"packageserver-d55dfcdfc-gf6ml\" (UID: \"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375355 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2qgp\" (UniqueName: \"kubernetes.io/projected/e117e37f-f455-4bfc-b859-b577cc25f4d8-kube-api-access-m2qgp\") pod \"ingress-canary-rkkzb\" (UID: \"e117e37f-f455-4bfc-b859-b577cc25f4d8\") " pod="openshift-ingress-canary/ingress-canary-rkkzb" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375373 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxlwm\" (UniqueName: \"kubernetes.io/projected/b05a7129-f82d-43ee-ac7e-9c2d1122a4d7-kube-api-access-kxlwm\") pod \"packageserver-d55dfcdfc-gf6ml\" (UID: \"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375396 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ca924069-e7d6-4520-94d9-98f18ed4002e-machine-approver-tls\") pod \"machine-approver-56656f9798-xspbt\" (UID: \"ca924069-e7d6-4520-94d9-98f18ed4002e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375422 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgdhj\" (UniqueName: \"kubernetes.io/projected/62e0f316-4ab0-4e56-9874-3008b20d930e-kube-api-access-pgdhj\") pod \"control-plane-machine-set-operator-78cbb6b69f-rfx55\" (UID: \"62e0f316-4ab0-4e56-9874-3008b20d930e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375444 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkjqn\" (UniqueName: \"kubernetes.io/projected/ca924069-e7d6-4520-94d9-98f18ed4002e-kube-api-access-xkjqn\") pod \"machine-approver-56656f9798-xspbt\" (UID: \"ca924069-e7d6-4520-94d9-98f18ed4002e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375464 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7zbq\" (UniqueName: 
\"kubernetes.io/projected/26eec851-c48a-47af-92a9-3fd33f385efe-kube-api-access-s7zbq\") pod \"service-ca-operator-777779d784-rqtqp\" (UID: \"26eec851-c48a-47af-92a9-3fd33f385efe\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375486 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6e7febfa-69fa-429e-85be-9d6d54d18760-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tqlbs\" (UID: \"6e7febfa-69fa-429e-85be-9d6d54d18760\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375509 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/c03dcab7-3d8d-424a-9e6c-ce689f5cb077-signing-key\") pod \"service-ca-9c57cc56f-fkb4r\" (UID: \"c03dcab7-3d8d-424a-9e6c-ce689f5cb077\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375529 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f1e5fa8d-e0b4-4e13-8954-706f606b84fa-certs\") pod \"machine-config-server-pz2qz\" (UID: \"f1e5fa8d-e0b4-4e13-8954-706f606b84fa\") " pod="openshift-machine-config-operator/machine-config-server-pz2qz" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375559 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rpq2\" (UniqueName: \"kubernetes.io/projected/921a2ca9-0cb3-401a-a978-a70a63d9418d-kube-api-access-6rpq2\") pod \"dns-default-zszr6\" (UID: \"921a2ca9-0cb3-401a-a978-a70a63d9418d\") " pod="openshift-dns/dns-default-zszr6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375579 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/254e5b81-f470-4b03-a703-bc60a100d4da-etcd-service-ca\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375600 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/254e5b81-f470-4b03-a703-bc60a100d4da-etcd-client\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375621 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/245499a3-8fea-446c-8a3e-daf9df73bf15-config-volume\") pod \"collect-profiles-29492340-zdtq6\" (UID: \"245499a3-8fea-446c-8a3e-daf9df73bf15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375642 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-mountpoint-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.375662 4776 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-plugins-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.376642 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-plugins-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.377850 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-csi-data-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.378342 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/921a2ca9-0cb3-401a-a978-a70a63d9418d-config-volume\") pod \"dns-default-zszr6\" (UID: \"921a2ca9-0cb3-401a-a978-a70a63d9418d\") " pod="openshift-dns/dns-default-zszr6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.379042 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/254e5b81-f470-4b03-a703-bc60a100d4da-config\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.379249 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26eec851-c48a-47af-92a9-3fd33f385efe-config\") pod \"service-ca-operator-777779d784-rqtqp\" (UID: \"26eec851-c48a-47af-92a9-3fd33f385efe\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.379364 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/254e5b81-f470-4b03-a703-bc60a100d4da-etcd-ca\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.379411 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/254e5b81-f470-4b03-a703-bc60a100d4da-etcd-service-ca\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.379643 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca924069-e7d6-4520-94d9-98f18ed4002e-config\") pod \"machine-approver-56656f9798-xspbt\" (UID: \"ca924069-e7d6-4520-94d9-98f18ed4002e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.379716 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-mountpoint-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.379894 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/245499a3-8fea-446c-8a3e-daf9df73bf15-config-volume\") pod \"collect-profiles-29492340-zdtq6\" (UID: \"245499a3-8fea-446c-8a3e-daf9df73bf15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.380498 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/c03dcab7-3d8d-424a-9e6c-ce689f5cb077-signing-cabundle\") pod \"service-ca-9c57cc56f-fkb4r\" (UID: \"c03dcab7-3d8d-424a-9e6c-ce689f5cb077\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.380566 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-socket-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.380618 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/986fcda6-5c06-431b-bba6-d108dbeb3d9f-registration-dir\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.380851 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:21.880838402 +0000 UTC m=+144.386443549 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.382238 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6e7febfa-69fa-429e-85be-9d6d54d18760-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tqlbs\" (UID: \"6e7febfa-69fa-429e-85be-9d6d54d18760\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.386097 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ca924069-e7d6-4520-94d9-98f18ed4002e-auth-proxy-config\") pod \"machine-approver-56656f9798-xspbt\" (UID: \"ca924069-e7d6-4520-94d9-98f18ed4002e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.386318 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/b05a7129-f82d-43ee-ac7e-9c2d1122a4d7-tmpfs\") pod \"packageserver-d55dfcdfc-gf6ml\" (UID: \"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.392677 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.401674 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/254e5b81-f470-4b03-a703-bc60a100d4da-etcd-client\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.403546 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ca924069-e7d6-4520-94d9-98f18ed4002e-machine-approver-tls\") pod \"machine-approver-56656f9798-xspbt\" (UID: \"ca924069-e7d6-4520-94d9-98f18ed4002e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.404255 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f1e5fa8d-e0b4-4e13-8954-706f606b84fa-node-bootstrap-token\") pod \"machine-config-server-pz2qz\" (UID: \"f1e5fa8d-e0b4-4e13-8954-706f606b84fa\") " pod="openshift-machine-config-operator/machine-config-server-pz2qz" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.404252 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/c03dcab7-3d8d-424a-9e6c-ce689f5cb077-signing-key\") pod \"service-ca-9c57cc56f-fkb4r\" (UID: \"c03dcab7-3d8d-424a-9e6c-ce689f5cb077\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.404562 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f1e5fa8d-e0b4-4e13-8954-706f606b84fa-certs\") pod \"machine-config-server-pz2qz\" (UID: \"f1e5fa8d-e0b4-4e13-8954-706f606b84fa\") " pod="openshift-machine-config-operator/machine-config-server-pz2qz" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.405088 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b05a7129-f82d-43ee-ac7e-9c2d1122a4d7-webhook-cert\") pod \"packageserver-d55dfcdfc-gf6ml\" (UID: \"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.408677 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/62e0f316-4ab0-4e56-9874-3008b20d930e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-rfx55\" (UID: \"62e0f316-4ab0-4e56-9874-3008b20d930e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.408961 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9d8s8\" (UID: \"8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.409983 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/245499a3-8fea-446c-8a3e-daf9df73bf15-secret-volume\") pod \"collect-profiles-29492340-zdtq6\" (UID: \"245499a3-8fea-446c-8a3e-daf9df73bf15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.410056 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6e7febfa-69fa-429e-85be-9d6d54d18760-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tqlbs\" (UID: \"6e7febfa-69fa-429e-85be-9d6d54d18760\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.411223 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/26eec851-c48a-47af-92a9-3fd33f385efe-serving-cert\") pod \"service-ca-operator-777779d784-rqtqp\" (UID: \"26eec851-c48a-47af-92a9-3fd33f385efe\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.415581 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b05a7129-f82d-43ee-ac7e-9c2d1122a4d7-apiservice-cert\") pod \"packageserver-d55dfcdfc-gf6ml\" (UID: \"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.415669 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e117e37f-f455-4bfc-b859-b577cc25f4d8-cert\") pod \"ingress-canary-rkkzb\" (UID: \"e117e37f-f455-4bfc-b859-b577cc25f4d8\") " pod="openshift-ingress-canary/ingress-canary-rkkzb" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.415927 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/254e5b81-f470-4b03-a703-bc60a100d4da-serving-cert\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.415943 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/921a2ca9-0cb3-401a-a978-a70a63d9418d-metrics-tls\") pod \"dns-default-zszr6\" (UID: \"921a2ca9-0cb3-401a-a978-a70a63d9418d\") " pod="openshift-dns/dns-default-zszr6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.416138 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e-srv-cert\") pod \"olm-operator-6b444d44fb-9d8s8\" (UID: \"8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.421656 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" event={"ID":"61b154b9-1f8d-461c-afa3-0c5204d5084f","Type":"ContainerStarted","Data":"bf8fe468fd45c8ae7207f07d015a14ca784c3416ecff8df79feb075f1e67ba80"} Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.422855 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-b9hnh" 
event={"ID":"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a","Type":"ContainerStarted","Data":"9ec964082f1aa296f2717ef1aa085d6a2bfe50cccbc2d9abcd7fbcdbc557c1cb"} Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.424094 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" event={"ID":"dafb70f2-b919-42f3-9f40-9f6183db8369","Type":"ContainerStarted","Data":"ff35ce1eadbd272a8d982d407070b80d0b7fb6f47959cd16e6df4fd0bf3863f3"} Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.427006 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" event={"ID":"dfecd87d-0e25-4116-ae11-72e607a011dd","Type":"ContainerStarted","Data":"11c72072ddd9687b71c60c27ad975f4cd711fc7725e05d087513bcf82a869eb4"} Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.431831 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plkp7\" (UniqueName: \"kubernetes.io/projected/245499a3-8fea-446c-8a3e-daf9df73bf15-kube-api-access-plkp7\") pod \"collect-profiles-29492340-zdtq6\" (UID: \"245499a3-8fea-446c-8a3e-daf9df73bf15\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.469982 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5clqb\" (UniqueName: \"kubernetes.io/projected/986fcda6-5c06-431b-bba6-d108dbeb3d9f-kube-api-access-5clqb\") pod \"csi-hostpathplugin-r28v4\" (UID: \"986fcda6-5c06-431b-bba6-d108dbeb3d9f\") " pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.476932 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.477112 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:21.977078714 +0000 UTC m=+144.482683871 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.477294 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.477838 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:21.977817667 +0000 UTC m=+144.483422884 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.496627 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmqj6\" (UniqueName: \"kubernetes.io/projected/6e7febfa-69fa-429e-85be-9d6d54d18760-kube-api-access-gmqj6\") pod \"marketplace-operator-79b997595-tqlbs\" (UID: \"6e7febfa-69fa-429e-85be-9d6d54d18760\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.501380 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgdhj\" (UniqueName: \"kubernetes.io/projected/62e0f316-4ab0-4e56-9874-3008b20d930e-kube-api-access-pgdhj\") pod \"control-plane-machine-set-operator-78cbb6b69f-rfx55\" (UID: \"62e0f316-4ab0-4e56-9874-3008b20d930e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.528143 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7zbq\" (UniqueName: \"kubernetes.io/projected/26eec851-c48a-47af-92a9-3fd33f385efe-kube-api-access-s7zbq\") pod \"service-ca-operator-777779d784-rqtqp\" (UID: \"26eec851-c48a-47af-92a9-3fd33f385efe\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.547963 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkjqn\" (UniqueName: \"kubernetes.io/projected/ca924069-e7d6-4520-94d9-98f18ed4002e-kube-api-access-xkjqn\") pod \"machine-approver-56656f9798-xspbt\" (UID: \"ca924069-e7d6-4520-94d9-98f18ed4002e\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.573591 4776 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wlh5j\" (UniqueName: \"kubernetes.io/projected/c03dcab7-3d8d-424a-9e6c-ce689f5cb077-kube-api-access-wlh5j\") pod \"service-ca-9c57cc56f-fkb4r\" (UID: \"c03dcab7-3d8d-424a-9e6c-ce689f5cb077\") " pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.574080 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8v9n\" (UniqueName: \"kubernetes.io/projected/254e5b81-f470-4b03-a703-bc60a100d4da-kube-api-access-l8v9n\") pod \"etcd-operator-b45778765-r869q\" (UID: \"254e5b81-f470-4b03-a703-bc60a100d4da\") " pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.575018 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rnpjf"] Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.578524 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.578662 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.078638688 +0000 UTC m=+144.584243835 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.578906 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.579232 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.079223216 +0000 UTC m=+144.584828363 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.584608 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqn8b\" (UniqueName: \"kubernetes.io/projected/8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e-kube-api-access-zqn8b\") pod \"olm-operator-6b444d44fb-9d8s8\" (UID: \"8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.587362 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn"] Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.590979 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.608523 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frc9g\" (UniqueName: \"kubernetes.io/projected/f1e5fa8d-e0b4-4e13-8954-706f606b84fa-kube-api-access-frc9g\") pod \"machine-config-server-pz2qz\" (UID: \"f1e5fa8d-e0b4-4e13-8954-706f606b84fa\") " pod="openshift-machine-config-operator/machine-config-server-pz2qz" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.628859 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2qgp\" (UniqueName: \"kubernetes.io/projected/e117e37f-f455-4bfc-b859-b577cc25f4d8-kube-api-access-m2qgp\") pod \"ingress-canary-rkkzb\" (UID: \"e117e37f-f455-4bfc-b859-b577cc25f4d8\") " pod="openshift-ingress-canary/ingress-canary-rkkzb" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.629166 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.637048 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.644588 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.647765 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxlwm\" (UniqueName: \"kubernetes.io/projected/b05a7129-f82d-43ee-ac7e-9c2d1122a4d7-kube-api-access-kxlwm\") pod \"packageserver-d55dfcdfc-gf6ml\" (UID: \"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.655394 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.664338 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.671881 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rpq2\" (UniqueName: \"kubernetes.io/projected/921a2ca9-0cb3-401a-a978-a70a63d9418d-kube-api-access-6rpq2\") pod \"dns-default-zszr6\" (UID: \"921a2ca9-0cb3-401a-a978-a70a63d9418d\") " pod="openshift-dns/dns-default-zszr6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.673214 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.680560 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.680683 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.180657106 +0000 UTC m=+144.686262253 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.680873 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.681217 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.181207933 +0000 UTC m=+144.686813070 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.681728 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.692246 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.700976 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-rkkzb" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.707986 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" Jan 27 19:12:21 crc kubenswrapper[4776]: W0127 19:12:21.720476 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc944923b_00f1_4f4f_a883_aeb69d86c343.slice/crio-0aeacce88ca5edd1e070c5c921d68051a6ea8689d0391f56a2e7e4bf054f17c2 WatchSource:0}: Error finding container 0aeacce88ca5edd1e070c5c921d68051a6ea8689d0391f56a2e7e4bf054f17c2: Status 404 returned error can't find the container with id 0aeacce88ca5edd1e070c5c921d68051a6ea8689d0391f56a2e7e4bf054f17c2 Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.729714 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-r28v4" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.739850 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-zszr6" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.748566 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-pz2qz" Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.782095 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.782242 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.282219581 +0000 UTC m=+144.787824738 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.782386 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.782814 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.282802199 +0000 UTC m=+144.788407346 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: W0127 19:12:21.813500 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca924069_e7d6_4520_94d9_98f18ed4002e.slice/crio-537af44346e21cb2800cddc51fdcc2c9c1d95cf7c6dc2058e22329ffc0375783 WatchSource:0}: Error finding container 537af44346e21cb2800cddc51fdcc2c9c1d95cf7c6dc2058e22329ffc0375783: Status 404 returned error can't find the container with id 537af44346e21cb2800cddc51fdcc2c9c1d95cf7c6dc2058e22329ffc0375783 Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.883634 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.883862 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.383830277 +0000 UTC m=+144.889435474 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.883922 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.884525 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.384513698 +0000 UTC m=+144.890118845 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.993044 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.994081 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.494035984 +0000 UTC m=+144.999641131 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.998157 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:21 crc kubenswrapper[4776]: E0127 19:12:21.998753 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.498737656 +0000 UTC m=+145.004342813 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:21 crc kubenswrapper[4776]: I0127 19:12:21.998925 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-p4pr2"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.001139 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-wkkxn"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.005214 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-n8wxr"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.013728 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-cdd4p"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.034990 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-tzpjt"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.102594 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:22 crc kubenswrapper[4776]: E0127 19:12:22.103023 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.603006063 +0000 UTC m=+145.108611210 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:22 crc kubenswrapper[4776]: W0127 19:12:22.135951 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16630e9b_9226_44d4_86e9_48fda67033fe.slice/crio-d652d2d50b13b5a8599db9435a1891149fb7dccf6c2c7d941e0c8ded90b3e9a9 WatchSource:0}: Error finding container d652d2d50b13b5a8599db9435a1891149fb7dccf6c2c7d941e0c8ded90b3e9a9: Status 404 returned error can't find the container with id d652d2d50b13b5a8599db9435a1891149fb7dccf6c2c7d941e0c8ded90b3e9a9 Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.205013 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:22 crc kubenswrapper[4776]: E0127 19:12:22.205431 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.705406802 +0000 UTC m=+145.211011949 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.306823 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:22 crc kubenswrapper[4776]: E0127 19:12:22.307228 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.807209404 +0000 UTC m=+145.312814551 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.354130 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.369377 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.375838 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.382961 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.408274 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:22 crc kubenswrapper[4776]: E0127 19:12:22.409482 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:22.908603353 +0000 UTC m=+145.414208500 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.411128 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.435517 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" event={"ID":"c944923b-00f1-4f4f-a883-aeb69d86c343","Type":"ContainerStarted","Data":"11c42742ebf12903135d59fb08c2eca789adf6bdabb2b3e2e3b977d6c408604e"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.435838 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" event={"ID":"c944923b-00f1-4f4f-a883-aeb69d86c343","Type":"ContainerStarted","Data":"0aeacce88ca5edd1e070c5c921d68051a6ea8689d0391f56a2e7e4bf054f17c2"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.439296 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.441276 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" event={"ID":"16630e9b-9226-44d4-86e9-48fda67033fe","Type":"ContainerStarted","Data":"d652d2d50b13b5a8599db9435a1891149fb7dccf6c2c7d941e0c8ded90b3e9a9"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.443192 4776 generic.go:334] "Generic (PLEG): container finished" podID="dafb70f2-b919-42f3-9f40-9f6183db8369" containerID="5d40b8f72ac8f25b1464380e8e47344cdd14c61a38a0a0cec3f585bb4da7d164" exitCode=0 Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.443251 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" event={"ID":"dafb70f2-b919-42f3-9f40-9f6183db8369","Type":"ContainerDied","Data":"5d40b8f72ac8f25b1464380e8e47344cdd14c61a38a0a0cec3f585bb4da7d164"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.445180 4776 generic.go:334] "Generic (PLEG): container finished" podID="68c010e7-6718-48f9-b901-f6047901b8b2" containerID="b2307e72eb5522d4837a8d6cbb888c2682e69b26c71e6f791dfa80c56fcaaa8c" exitCode=0 Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.445382 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" event={"ID":"68c010e7-6718-48f9-b901-f6047901b8b2","Type":"ContainerDied","Data":"b2307e72eb5522d4837a8d6cbb888c2682e69b26c71e6f791dfa80c56fcaaa8c"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.445432 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" event={"ID":"68c010e7-6718-48f9-b901-f6047901b8b2","Type":"ContainerStarted","Data":"3f485053190c65882053782339457443abf3dc1af0b91ed590038a6045cdb80f"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.447287 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkkxn" event={"ID":"d2485845-4359-4e05-9523-8e5e4a40d827","Type":"ContainerStarted","Data":"e86e670cfe1e05bcf02748cb20a18bacc54783a1b85e7f6e3f46cea6e09f3002"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.447563 4776 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-rnpjf container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.447616 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" podUID="c944923b-00f1-4f4f-a883-aeb69d86c343" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.449156 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" event={"ID":"db05b4c4-16ee-4a32-ac8c-eefda506c6db","Type":"ContainerStarted","Data":"d592d52c2bfaa8c16f323130665c2c440ca509d66ad6767e25aba74d498de86d"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.452195 4776 generic.go:334] "Generic (PLEG): container finished" podID="61b154b9-1f8d-461c-afa3-0c5204d5084f" containerID="bbb65304c5e2d25f93747678e47db5c2136199087f99ef5981609fb644581603" exitCode=0 Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.452276 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" event={"ID":"61b154b9-1f8d-461c-afa3-0c5204d5084f","Type":"ContainerDied","Data":"bbb65304c5e2d25f93747678e47db5c2136199087f99ef5981609fb644581603"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.456288 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-cdd4p" event={"ID":"9e2f4f0c-a78b-40dc-855f-a411b72183c5","Type":"ContainerStarted","Data":"eb9dabaf9449149e053c011034d460246e6be680c0b9a3c480f8e6acf9d27de6"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.465551 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p4pr2" event={"ID":"e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9","Type":"ContainerStarted","Data":"ffbca4a673db2d029d365b460c95b447a2a0f19a786b16aeb6304d7c7f95d69a"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.471732 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-b9hnh" event={"ID":"fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a","Type":"ContainerStarted","Data":"ae10000593ffdf74688b43652a39ab3e1c1c18a7e0743fb01034680bc924bf51"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.485838 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-pz2qz" event={"ID":"f1e5fa8d-e0b4-4e13-8954-706f606b84fa","Type":"ContainerStarted","Data":"d8ece984b2db2f6cba58e1d51c95fe7b563897d6e9da38fac2488b99aea1f99f"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.485905 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-pz2qz" 
event={"ID":"f1e5fa8d-e0b4-4e13-8954-706f606b84fa","Type":"ContainerStarted","Data":"c9d5e919061a20703a5bbe8d67cc46ccf05dd07939ffb394b2a5067788f140aa"} Jan 27 19:12:22 crc kubenswrapper[4776]: W0127 19:12:22.491296 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62831e09_6067_4270_be11_d251956dffa2.slice/crio-f28bf08b3cf1e752d18751fd202a8a0aa0ff8f2fef1d25628dafc6090d4a1ff9 WatchSource:0}: Error finding container f28bf08b3cf1e752d18751fd202a8a0aa0ff8f2fef1d25628dafc6090d4a1ff9: Status 404 returned error can't find the container with id f28bf08b3cf1e752d18751fd202a8a0aa0ff8f2fef1d25628dafc6090d4a1ff9 Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.494000 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" event={"ID":"dfecd87d-0e25-4116-ae11-72e607a011dd","Type":"ContainerStarted","Data":"b161bf76f517d8d602a77e4f9e96d22a4813848ec37b62c0413aaea67aa540ea"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.495433 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.512371 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.512600 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:22 crc kubenswrapper[4776]: E0127 19:12:22.513864 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:23.013841729 +0000 UTC m=+145.519446876 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.527309 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" event={"ID":"ca924069-e7d6-4520-94d9-98f18ed4002e","Type":"ContainerStarted","Data":"537af44346e21cb2800cddc51fdcc2c9c1d95cf7c6dc2058e22329ffc0375783"} Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.552029 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm"] Jan 27 19:12:22 crc kubenswrapper[4776]: W0127 19:12:22.593237 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod52f1c1ca_0b7a_4d48_b967_1d4525e220e1.slice/crio-bed9183c05cb9ea771441bb0222706f8ebc39162a2d3426bdf433856e87deccb WatchSource:0}: Error finding container bed9183c05cb9ea771441bb0222706f8ebc39162a2d3426bdf433856e87deccb: Status 404 returned error can't find the container with id bed9183c05cb9ea771441bb0222706f8ebc39162a2d3426bdf433856e87deccb Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.614923 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:22 crc kubenswrapper[4776]: E0127 19:12:22.616761 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:23.116738944 +0000 UTC m=+145.622344091 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.682032 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-894r8"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.686977 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.696074 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-7pq2b"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.715965 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:22 crc kubenswrapper[4776]: E0127 19:12:22.716138 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:23.216088831 +0000 UTC m=+145.721693978 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.716393 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:22 crc kubenswrapper[4776]: E0127 19:12:22.716759 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:23.216743441 +0000 UTC m=+145.722348588 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:22 crc kubenswrapper[4776]: W0127 19:12:22.745418 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod256c7026_dc32_4fd8_9250_608d1b5de8bf.slice/crio-d9838f838b04408c35cdf52788f7e0c916136b723057009b332f6bb9f8b54245 WatchSource:0}: Error finding container d9838f838b04408c35cdf52788f7e0c916136b723057009b332f6bb9f8b54245: Status 404 returned error can't find the container with id d9838f838b04408c35cdf52788f7e0c916136b723057009b332f6bb9f8b54245 Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.765720 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:12:22 crc kubenswrapper[4776]: W0127 19:12:22.773287 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46a581f4_765e_458d_866b_fe1803351969.slice/crio-7081f0b2fc6eefaf2ee77ec2a396416ae448055ac660d73898e2771e3586c10f WatchSource:0}: Error finding container 7081f0b2fc6eefaf2ee77ec2a396416ae448055ac660d73898e2771e3586c10f: Status 404 returned error can't find the container with id 7081f0b2fc6eefaf2ee77ec2a396416ae448055ac660d73898e2771e3586c10f Jan 27 19:12:22 crc kubenswrapper[4776]: W0127 19:12:22.776021 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1353bef9_e9b7_4242_afc2_1ff34d4da935.slice/crio-e7e0db39852dc4614458a969a01a2cfa264a49f4acf16483e2b0b8dabcad396f WatchSource:0}: Error finding container e7e0db39852dc4614458a969a01a2cfa264a49f4acf16483e2b0b8dabcad396f: Status 404 returned error can't find the container with id e7e0db39852dc4614458a969a01a2cfa264a49f4acf16483e2b0b8dabcad396f Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.818806 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:22 crc kubenswrapper[4776]: E0127 19:12:22.819213 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:23.319195212 +0000 UTC m=+145.824800359 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.848645 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.853047 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.859334 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.877528 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.901888 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dghnj"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.920291 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:22 crc kubenswrapper[4776]: E0127 19:12:22.922293 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:23.422278753 +0000 UTC m=+145.927883900 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.935441 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-vnp9r"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.940349 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.944131 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-rkkzb"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.948694 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.956846 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tqlbs"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.962191 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.976819 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-zszr6"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.991729 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp"] Jan 27 19:12:22 crc kubenswrapper[4776]: W0127 19:12:22.994984 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod59235ec5_26bb_4966_a543_db238811abe6.slice/crio-6f3ba325d968d6c7f57c66b0fad942c22a8d151f4d4061bdbebe7b5678fb72fb WatchSource:0}: Error finding container 6f3ba325d968d6c7f57c66b0fad942c22a8d151f4d4061bdbebe7b5678fb72fb: Status 404 returned error can't find the container with id 6f3ba325d968d6c7f57c66b0fad942c22a8d151f4d4061bdbebe7b5678fb72fb Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.995662 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.998320 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml"] Jan 27 19:12:22 crc kubenswrapper[4776]: I0127 19:12:22.999687 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-r28v4"] Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.001726 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-fkb4r"] Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.018729 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-r869q"] Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.025594 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:23 crc kubenswrapper[4776]: E0127 19:12:23.026099 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:23.526076885 +0000 UTC m=+146.031682032 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:23 crc kubenswrapper[4776]: W0127 19:12:23.028620 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4aa9ef2_3d89_409d_97a7_3990f846504a.slice/crio-ae13d1c54c63523658aa0b317907e7d5c49ee7f4c26f7798c2f064f29a8d8edc WatchSource:0}: Error finding container ae13d1c54c63523658aa0b317907e7d5c49ee7f4c26f7798c2f064f29a8d8edc: Status 404 returned error can't find the container with id ae13d1c54c63523658aa0b317907e7d5c49ee7f4c26f7798c2f064f29a8d8edc Jan 27 19:12:23 crc kubenswrapper[4776]: W0127 19:12:23.035386 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod039a9234_6e8f_4854_b694_478409aacf35.slice/crio-f5f7ea63e4450f6ead3fe4597f8f0d461722b7489ca4da48d39599488eee156d WatchSource:0}: Error finding container f5f7ea63e4450f6ead3fe4597f8f0d461722b7489ca4da48d39599488eee156d: Status 404 returned error can't find the container with id f5f7ea63e4450f6ead3fe4597f8f0d461722b7489ca4da48d39599488eee156d Jan 27 19:12:23 crc kubenswrapper[4776]: W0127 19:12:23.068284 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62e0f316_4ab0_4e56_9874_3008b20d930e.slice/crio-4a8cb8ce2e6178282191fe64c06beefb64f90d9195a615bb10f0440e912a7bca WatchSource:0}: Error finding container 4a8cb8ce2e6178282191fe64c06beefb64f90d9195a615bb10f0440e912a7bca: Status 404 returned error can't find the container with id 4a8cb8ce2e6178282191fe64c06beefb64f90d9195a615bb10f0440e912a7bca Jan 27 19:12:23 crc kubenswrapper[4776]: W0127 19:12:23.080716 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod921a2ca9_0cb3_401a_a978_a70a63d9418d.slice/crio-7a9ec2c8334de9039f3751d9bd22eb2b4aa32a8d5ed82ea30ca853cf1779c494 WatchSource:0}: Error finding container 7a9ec2c8334de9039f3751d9bd22eb2b4aa32a8d5ed82ea30ca853cf1779c494: Status 404 returned error can't find the container with id 7a9ec2c8334de9039f3751d9bd22eb2b4aa32a8d5ed82ea30ca853cf1779c494 Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.127577 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: 
\"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:23 crc kubenswrapper[4776]: E0127 19:12:23.128186 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:23.627975499 +0000 UTC m=+146.133580646 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:23 crc kubenswrapper[4776]: W0127 19:12:23.134480 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod254e5b81_f470_4b03_a703_bc60a100d4da.slice/crio-b96d7f8f0a0f5ec0523cdd90fab2a9af436ec2e6a74ca78d6340aed81772df72 WatchSource:0}: Error finding container b96d7f8f0a0f5ec0523cdd90fab2a9af436ec2e6a74ca78d6340aed81772df72: Status 404 returned error can't find the container with id b96d7f8f0a0f5ec0523cdd90fab2a9af436ec2e6a74ca78d6340aed81772df72 Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.183910 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.186142 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" podStartSLOduration=120.186130476 podStartE2EDuration="2m0.186130476s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.184882257 +0000 UTC m=+145.690487404" watchObservedRunningTime="2026-01-27 19:12:23.186130476 +0000 UTC m=+145.691735623" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.189964 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:23 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:23 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:23 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.190018 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.229033 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:23 crc kubenswrapper[4776]: E0127 
19:12:23.229293 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:23.729277086 +0000 UTC m=+146.234882233 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.266093 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-pz2qz" podStartSLOduration=5.266067113 podStartE2EDuration="5.266067113s" podCreationTimestamp="2026-01-27 19:12:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.263846046 +0000 UTC m=+145.769451203" watchObservedRunningTime="2026-01-27 19:12:23.266067113 +0000 UTC m=+145.771672260" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.332245 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:23 crc kubenswrapper[4776]: E0127 19:12:23.332667 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:23.832652775 +0000 UTC m=+146.338257922 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.349993 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-b9hnh" podStartSLOduration=120.349972341 podStartE2EDuration="2m0.349972341s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.305786659 +0000 UTC m=+145.811391836" watchObservedRunningTime="2026-01-27 19:12:23.349972341 +0000 UTC m=+145.855577488" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.433033 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:23 crc kubenswrapper[4776]: E0127 19:12:23.433335 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:23.933307952 +0000 UTC m=+146.438913099 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.433856 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:23 crc kubenswrapper[4776]: E0127 19:12:23.434415 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:23.934399105 +0000 UTC m=+146.440004252 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.536600 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:23 crc kubenswrapper[4776]: E0127 19:12:23.537162 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:24.037147575 +0000 UTC m=+146.542752722 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.553192 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" event={"ID":"a01b7ee4-02a1-4b53-81bf-7367b7e025a5","Type":"ContainerStarted","Data":"48561ec1b56ed1985a7be1c6fd3a2c63e866476755dae1896770e3f47985bebe"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.560644 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" event={"ID":"68c010e7-6718-48f9-b901-f6047901b8b2","Type":"ContainerStarted","Data":"235c1156a441ca4c700afd0849df3e70e29fb5cafc5e457e17730bd475c117da"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.561236 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.566543 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p4pr2" event={"ID":"e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9","Type":"ContainerStarted","Data":"832b8a58b59a5635a2200f779e083410d8299fdaf670472c8654e4e901e282d2"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.566573 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-p4pr2" event={"ID":"e315a8b4-f52f-4cd8-9ee3-f450e4c62cb9","Type":"ContainerStarted","Data":"bc31e48158d8d2fb91052f9542cd31c553d200263cbb972a5bc8be16ed0af69c"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.573954 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" 
event={"ID":"8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e","Type":"ContainerStarted","Data":"bd5b0558c11a512d3184b0e20498cc1ff4b479cfdf9fbcdac9d07e04db80d21c"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.574011 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" event={"ID":"8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e","Type":"ContainerStarted","Data":"d9479945ded7c7e7af8f754442e2cb58fb3af85e2e36762b820593834db7269a"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.574427 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.578093 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" event={"ID":"46a581f4-765e-458d-866b-fe1803351969","Type":"ContainerStarted","Data":"c572b55e0b30657cf61634bde0810494fb897641baa7f411257579883196fb46"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.578177 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" event={"ID":"46a581f4-765e-458d-866b-fe1803351969","Type":"ContainerStarted","Data":"7081f0b2fc6eefaf2ee77ec2a396416ae448055ac660d73898e2771e3586c10f"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.579583 4776 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-9d8s8 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body= Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.579647 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" podUID="8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.582274 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" podStartSLOduration=120.582258095 podStartE2EDuration="2m0.582258095s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.35715864 +0000 UTC m=+145.862763797" watchObservedRunningTime="2026-01-27 19:12:23.582258095 +0000 UTC m=+146.087863242" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.584208 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" event={"ID":"5991ed3f-a5bf-4586-918a-b79afd2d8c27","Type":"ContainerStarted","Data":"89d2686ec684ef2f04cd6a80a71e420f35801fe54242c0881605bf3fa9aa3f99"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.585402 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" podStartSLOduration=120.5853707 podStartE2EDuration="2m0.5853707s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-27 19:12:23.580599855 +0000 UTC m=+146.086205002" watchObservedRunningTime="2026-01-27 19:12:23.5853707 +0000 UTC m=+146.090975847" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.589085 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" event={"ID":"de3063f6-acdd-4c62-9d94-d5dd82b0fda1","Type":"ContainerStarted","Data":"908b7ff9947443edb0746fc35902e9cf8156568d63348f45c785ff2a17e29d9a"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.589129 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" event={"ID":"de3063f6-acdd-4c62-9d94-d5dd82b0fda1","Type":"ContainerStarted","Data":"39519705e321423ab71b9de594d31be49e8601076ea8043ccb891c42bd71ae46"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.592938 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-894r8" event={"ID":"1353bef9-e9b7-4242-afc2-1ff34d4da935","Type":"ContainerStarted","Data":"cb98671eab0a6300421f677008fa9f23948589a11192b2f64a86f1a83f182c20"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.592978 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-894r8" event={"ID":"1353bef9-e9b7-4242-afc2-1ff34d4da935","Type":"ContainerStarted","Data":"e7e0db39852dc4614458a969a01a2cfa264a49f4acf16483e2b0b8dabcad396f"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.593398 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.606511 4776 patch_prober.go:28] interesting pod/console-operator-58897d9998-894r8 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.27:8443/readyz\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body= Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.606582 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-894r8" podUID="1353bef9-e9b7-4242-afc2-1ff34d4da935" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/readyz\": dial tcp 10.217.0.27:8443: connect: connection refused" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.611055 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-49rr2" podStartSLOduration=120.611037339 podStartE2EDuration="2m0.611037339s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.606922634 +0000 UTC m=+146.112527771" watchObservedRunningTime="2026-01-27 19:12:23.611037339 +0000 UTC m=+146.116642486" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.614603 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" event={"ID":"62831e09-6067-4270-be11-d251956dffa2","Type":"ContainerStarted","Data":"e80c279d9efe7fc88accd9d69fdfed47d2275e173e096d88bb3cf7281d0af6a5"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.614648 4776 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" event={"ID":"62831e09-6067-4270-be11-d251956dffa2","Type":"ContainerStarted","Data":"f28bf08b3cf1e752d18751fd202a8a0aa0ff8f2fef1d25628dafc6090d4a1ff9"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.625171 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-7pq2b" event={"ID":"256c7026-dc32-4fd8-9250-608d1b5de8bf","Type":"ContainerStarted","Data":"e9fc99cd2fca30c072fe71b16a53fed5c309ce007ecfa151b85a7ce5546eaf4e"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.625235 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-7pq2b" event={"ID":"256c7026-dc32-4fd8-9250-608d1b5de8bf","Type":"ContainerStarted","Data":"d9838f838b04408c35cdf52788f7e0c916136b723057009b332f6bb9f8b54245"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.632460 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" podStartSLOduration=120.632438979 podStartE2EDuration="2m0.632438979s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.629890372 +0000 UTC m=+146.135495529" watchObservedRunningTime="2026-01-27 19:12:23.632438979 +0000 UTC m=+146.138044126" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.638316 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:23 crc kubenswrapper[4776]: E0127 19:12:23.638639 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:24.138624447 +0000 UTC m=+146.644229594 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.644568 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" event={"ID":"f0d8de85-33c3-4bd0-9589-f140ba0b77c4","Type":"ContainerStarted","Data":"3d6789b0e8b13750f6859bbbb513fb73a6071b29727b62e212fbd44158556947"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.645006 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" event={"ID":"f0d8de85-33c3-4bd0-9589-f140ba0b77c4","Type":"ContainerStarted","Data":"aafa97b16ba3d537c05ca80ac90670bff448617ada9abb05864842e0c7e5f6ef"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.649948 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" event={"ID":"245499a3-8fea-446c-8a3e-daf9df73bf15","Type":"ContainerStarted","Data":"b9ebcf237a012d96c5ceb0eba56b69ca7666854b8fddd92272eb48b8381e29ab"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.652187 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-p4pr2" podStartSLOduration=120.652169718 podStartE2EDuration="2m0.652169718s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.651324373 +0000 UTC m=+146.156929510" watchObservedRunningTime="2026-01-27 19:12:23.652169718 +0000 UTC m=+146.157774865" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.670050 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55" event={"ID":"62e0f316-4ab0-4e56-9874-3008b20d930e","Type":"ContainerStarted","Data":"4a8cb8ce2e6178282191fe64c06beefb64f90d9195a615bb10f0440e912a7bca"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.692162 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-vnp9r" event={"ID":"039a9234-6e8f-4854-b694-478409aacf35","Type":"ContainerStarted","Data":"f5f7ea63e4450f6ead3fe4597f8f0d461722b7489ca4da48d39599488eee156d"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.695749 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" event={"ID":"6c7a556d-56ac-4392-8f4e-c2c824ab5e90","Type":"ContainerStarted","Data":"7632966e709cd4dd00351d20406ee1d899aacf36e7af9ccabfbc7e87645af539"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.698830 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-zszr6" event={"ID":"921a2ca9-0cb3-401a-a978-a70a63d9418d","Type":"ContainerStarted","Data":"7a9ec2c8334de9039f3751d9bd22eb2b4aa32a8d5ed82ea30ca853cf1779c494"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.709238 4776 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-cdd4p" event={"ID":"9e2f4f0c-a78b-40dc-855f-a411b72183c5","Type":"ContainerStarted","Data":"e9e8f8605e0a76e09f6c7695afd92c2790315fba103515e82d62ffaaccd1cadb"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.709325 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-f4dn6" podStartSLOduration=120.709312004 podStartE2EDuration="2m0.709312004s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.67495732 +0000 UTC m=+146.180562467" watchObservedRunningTime="2026-01-27 19:12:23.709312004 +0000 UTC m=+146.214917151" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.709844 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-cdd4p" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.713392 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" event={"ID":"33ba45e2-c084-450f-822d-2326de41ad36","Type":"ContainerStarted","Data":"dcd980cc523b1a35a9c4a3b176ed85895a24000560b52ed2bbb6a62a1c94862d"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.713434 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" event={"ID":"33ba45e2-c084-450f-822d-2326de41ad36","Type":"ContainerStarted","Data":"8ba2b7b7388eb4f8b5850baa6729333ac65525c92491e20da8aaed7ab4a14fe0"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.713573 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-894r8" podStartSLOduration=120.713566003 podStartE2EDuration="2m0.713566003s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.708810929 +0000 UTC m=+146.214416086" watchObservedRunningTime="2026-01-27 19:12:23.713566003 +0000 UTC m=+146.219171150" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.729301 4776 patch_prober.go:28] interesting pod/downloads-7954f5f757-cdd4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.729362 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-cdd4p" podUID="9e2f4f0c-a78b-40dc-855f-a411b72183c5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.34:8080/\": dial tcp 10.217.0.34:8080: connect: connection refused" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.735042 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkkxn" event={"ID":"d2485845-4359-4e05-9523-8e5e4a40d827","Type":"ContainerStarted","Data":"7dbbc98a5f29f22cf5c35abb955b5b0176d7fe7b7ad0f25105053eadc7a4b17c"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.735095 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkkxn" 
event={"ID":"d2485845-4359-4e05-9523-8e5e4a40d827","Type":"ContainerStarted","Data":"e11771eff9d9b3fe83b7b291b6344d4b914e62604ac59d6cf06581fc9d0b5f56"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.739836 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:23 crc kubenswrapper[4776]: E0127 19:12:23.741195 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:24.241175321 +0000 UTC m=+146.746780468 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.743310 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" event={"ID":"6e7febfa-69fa-429e-85be-9d6d54d18760","Type":"ContainerStarted","Data":"fb61f7abf430194f3f1783835a05a8ce592b9ea53d53c11886451ccc884a4bb2"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.748125 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r28v4" event={"ID":"986fcda6-5c06-431b-bba6-d108dbeb3d9f","Type":"ContainerStarted","Data":"025d415fff724cbc440c8af6ba9a2f70ac715a819572ef467b4d4482b6f90105"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.750019 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" event={"ID":"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7","Type":"ContainerStarted","Data":"adc61acb1f17637a63aa065f875a6bda077cf7e8f95018f5b8d0733fb57ec852"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.752931 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" event={"ID":"c4aa9ef2-3d89-409d-97a7-3990f846504a","Type":"ContainerStarted","Data":"ae13d1c54c63523658aa0b317907e7d5c49ee7f4c26f7798c2f064f29a8d8edc"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.759243 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dvh8z" podStartSLOduration=120.75922515 podStartE2EDuration="2m0.75922515s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.731798087 +0000 UTC m=+146.237403234" watchObservedRunningTime="2026-01-27 19:12:23.75922515 +0000 UTC m=+146.264830297" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.760369 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-cdd4p" 
podStartSLOduration=120.760364325 podStartE2EDuration="2m0.760364325s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.758509868 +0000 UTC m=+146.264115045" watchObservedRunningTime="2026-01-27 19:12:23.760364325 +0000 UTC m=+146.265969472" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.763295 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" event={"ID":"254e5b81-f470-4b03-a703-bc60a100d4da","Type":"ContainerStarted","Data":"b96d7f8f0a0f5ec0523cdd90fab2a9af436ec2e6a74ca78d6340aed81772df72"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.772123 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd" event={"ID":"c9b30cf2-d2a9-4e2c-896d-60a54109e9ee","Type":"ContainerStarted","Data":"e6a0ae1ba7a6cb7719e885ad1b04d00ab6e0ef0aaa3b1cefcc8a58dac41ecf47"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.772165 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd" event={"ID":"c9b30cf2-d2a9-4e2c-896d-60a54109e9ee","Type":"ContainerStarted","Data":"ffb7f543b029df511c6135193db65cface159da2f0cac761a70b37d5b2ba2f42"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.779849 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" event={"ID":"152fa2df-4029-4e25-aa4e-98005cac6226","Type":"ContainerStarted","Data":"7fc7daa7fc24c34bb98062a846c4573d90757d60a279707e005da7595fd62683"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.779888 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" event={"ID":"152fa2df-4029-4e25-aa4e-98005cac6226","Type":"ContainerStarted","Data":"7af803a8668ce509582850715b20b72bc4897747168367835963b33764752907"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.785585 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-vnp9r" podStartSLOduration=120.78555863 podStartE2EDuration="2m0.78555863s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.777048781 +0000 UTC m=+146.282653948" watchObservedRunningTime="2026-01-27 19:12:23.78555863 +0000 UTC m=+146.291163777" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.791611 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" event={"ID":"dafb70f2-b919-42f3-9f40-9f6183db8369","Type":"ContainerStarted","Data":"4499a7b05f8fce38d9c9cc4d248a989e68de7a695d86a6b291721e36038c6586"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.804241 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-dfrfs" podStartSLOduration=120.804213736 podStartE2EDuration="2m0.804213736s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-27 19:12:23.798685468 +0000 UTC m=+146.304290635" watchObservedRunningTime="2026-01-27 19:12:23.804213736 +0000 UTC m=+146.309818903" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.815198 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" event={"ID":"c03dcab7-3d8d-424a-9e6c-ce689f5cb077","Type":"ContainerStarted","Data":"18b982e13cb4a0c92559fbbb2c98466bcc46769dcebd2702ac9f1e681849376e"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.820310 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" event={"ID":"59235ec5-26bb-4966-a543-db238811abe6","Type":"ContainerStarted","Data":"6f3ba325d968d6c7f57c66b0fad942c22a8d151f4d4061bdbebe7b5678fb72fb"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.829703 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-j9lwf" podStartSLOduration=120.829682709 podStartE2EDuration="2m0.829682709s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.828053679 +0000 UTC m=+146.333658826" watchObservedRunningTime="2026-01-27 19:12:23.829682709 +0000 UTC m=+146.335287866" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.830126 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" event={"ID":"53042356-fe0e-4cd4-ba21-301a7040c408","Type":"ContainerStarted","Data":"ac242b2c0e12f4585274056b813d5f066c9f27d99bc83346b8de16822b234a4d"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.845273 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" event={"ID":"db05b4c4-16ee-4a32-ac8c-eefda506c6db","Type":"ContainerStarted","Data":"7c76737f21239a17d658ec2654f19fc6cc464417ff74f5aa908341f6e1036eae"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.845318 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" event={"ID":"db05b4c4-16ee-4a32-ac8c-eefda506c6db","Type":"ContainerStarted","Data":"f116ac42196270e425a7582ac0122a83d579eae0f6565c7b022ed5d17ae6633b"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.845730 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:23 crc kubenswrapper[4776]: E0127 19:12:23.847915 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:24.347894922 +0000 UTC m=+146.853500069 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.850754 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-wkkxn" podStartSLOduration=120.850739188 podStartE2EDuration="2m0.850739188s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.850684607 +0000 UTC m=+146.356289764" watchObservedRunningTime="2026-01-27 19:12:23.850739188 +0000 UTC m=+146.356344335" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.854473 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" event={"ID":"26eec851-c48a-47af-92a9-3fd33f385efe","Type":"ContainerStarted","Data":"c61734a2b474bf9b20c84f29e4bc5e1644fc7de578a6d39cb020b8812d1953a2"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.870816 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-tzpjt" podStartSLOduration=120.870796478 podStartE2EDuration="2m0.870796478s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.869179479 +0000 UTC m=+146.374784636" watchObservedRunningTime="2026-01-27 19:12:23.870796478 +0000 UTC m=+146.376401635" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.876492 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" event={"ID":"61b154b9-1f8d-461c-afa3-0c5204d5084f","Type":"ContainerStarted","Data":"026225e508ca79913f779fc190a832c0b2f657cbfed85833d18e7bc33cc370c0"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.883421 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" event={"ID":"ca924069-e7d6-4520-94d9-98f18ed4002e","Type":"ContainerStarted","Data":"9a266c8817a5a68ce70afe4e10da881807f967b4663ef484f3aa1caf4e016c61"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.883459 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" event={"ID":"ca924069-e7d6-4520-94d9-98f18ed4002e","Type":"ContainerStarted","Data":"979e4a979f3a5fe62b6ba0092d72a5c3c29fd932127e82a3edff21d7f41077df"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.913986 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" event={"ID":"52f1c1ca-0b7a-4d48-b967-1d4525e220e1","Type":"ContainerStarted","Data":"2154b01c091ac20efeba5b5b262b3c5a10aaa07c134239fec93efc4b6835ae29"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.914047 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" 
event={"ID":"52f1c1ca-0b7a-4d48-b967-1d4525e220e1","Type":"ContainerStarted","Data":"bed9183c05cb9ea771441bb0222706f8ebc39162a2d3426bdf433856e87deccb"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.914519 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.936435 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" podStartSLOduration=120.936417181 podStartE2EDuration="2m0.936417181s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.920395214 +0000 UTC m=+146.426000371" watchObservedRunningTime="2026-01-27 19:12:23.936417181 +0000 UTC m=+146.442022328" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.950216 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:23 crc kubenswrapper[4776]: E0127 19:12:23.952750 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:24.452719776 +0000 UTC m=+146.958324933 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.953703 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.955003 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" event={"ID":"16630e9b-9226-44d4-86e9-48fda67033fe","Type":"ContainerStarted","Data":"c8f4b2daf7fed0469ecee31a929cd65ca0a912fade4759f3350bb6da11e4cbf0"} Jan 27 19:12:23 crc kubenswrapper[4776]: E0127 19:12:23.956327 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:24.456307495 +0000 UTC m=+146.961912642 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.956734 4776 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-2qjhm container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.956992 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" podUID="52f1c1ca-0b7a-4d48-b967-1d4525e220e1" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.966386 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-rkkzb" event={"ID":"e117e37f-f455-4bfc-b859-b577cc25f4d8","Type":"ContainerStarted","Data":"95761ec0095160371a61bd06a7b06486465516690dbd263aaebecbe9c27788e5"} Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.973836 4776 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-rnpjf container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.973883 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" podUID="c944923b-00f1-4f4f-a883-aeb69d86c343" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.988853 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-xspbt" podStartSLOduration=120.98876069 podStartE2EDuration="2m0.98876069s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.950044105 +0000 UTC m=+146.455649262" watchObservedRunningTime="2026-01-27 19:12:23.98876069 +0000 UTC m=+146.494365847" Jan 27 19:12:23 crc kubenswrapper[4776]: I0127 19:12:23.991969 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" podStartSLOduration=120.991955957 podStartE2EDuration="2m0.991955957s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:23.98939659 +0000 UTC m=+146.495001737" watchObservedRunningTime="2026-01-27 19:12:23.991955957 +0000 UTC m=+146.497561104" Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 
19:12:24.058629 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.061119 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:24.561093007 +0000 UTC m=+147.066698154 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.065413 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-rkkzb" podStartSLOduration=6.065396108 podStartE2EDuration="6.065396108s" podCreationTimestamp="2026-01-27 19:12:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:24.030985753 +0000 UTC m=+146.536590890" watchObservedRunningTime="2026-01-27 19:12:24.065396108 +0000 UTC m=+146.571001255" Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.160757 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.161162 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:24.661146365 +0000 UTC m=+147.166751512 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.181744 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:24 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:24 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:24 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.182041 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.261436 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.261849 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:24.761831322 +0000 UTC m=+147.267436469 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.363378 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.363685 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:24.863672356 +0000 UTC m=+147.369277503 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.464370 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.464545 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:24.964520518 +0000 UTC m=+147.470125665 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.464986 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.465260 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:24.965248271 +0000 UTC m=+147.470853418 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.566469 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.566616 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.066594669 +0000 UTC m=+147.572199836 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.566758 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.567126 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.067114744 +0000 UTC m=+147.572719891 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.668206 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.668381 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.168351138 +0000 UTC m=+147.673956285 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.668864 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.669247 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.169235795 +0000 UTC m=+147.674841012 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.769669 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.769894 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.269863541 +0000 UTC m=+147.775468688 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.769998 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.770387 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.270374277 +0000 UTC m=+147.775979424 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.871199 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.871367 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.371341013 +0000 UTC m=+147.876946160 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.871496 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.871815 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.371807067 +0000 UTC m=+147.877412214 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.972797 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.972918 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.472895356 +0000 UTC m=+147.978500493 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.973001 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:24 crc kubenswrapper[4776]: E0127 19:12:24.973256 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.473240807 +0000 UTC m=+147.978845954 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.973760 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55" event={"ID":"62e0f316-4ab0-4e56-9874-3008b20d930e","Type":"ContainerStarted","Data":"e49a9256283d60ea5a2025f4a242ccdc46254eed24b9a186142203f8322cb513"} Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.975200 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" event={"ID":"254e5b81-f470-4b03-a703-bc60a100d4da","Type":"ContainerStarted","Data":"6a924e719f97ef75dabcc857e9c39b1c1e3d25bc9340a160597101c5eac8195a"} Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.976611 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-7pq2b" event={"ID":"256c7026-dc32-4fd8-9250-608d1b5de8bf","Type":"ContainerStarted","Data":"f83d83d3abd93a2f7490ca45d2537c3f012f9888eee8e3329d278c1df0b0748b"} Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.977941 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" event={"ID":"6c7a556d-56ac-4392-8f4e-c2c824ab5e90","Type":"ContainerStarted","Data":"ea89c994e36d6d15158378992a285a8c6154e2cdf355bf0f4f29aad639eddc07"} Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.977972 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" event={"ID":"6c7a556d-56ac-4392-8f4e-c2c824ab5e90","Type":"ContainerStarted","Data":"e0e861be4f5ce4553966e2b657e7c26d56c7f33ed6c29c465ca1b61d371b6f6b"} Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.979125 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" event={"ID":"c03dcab7-3d8d-424a-9e6c-ce689f5cb077","Type":"ContainerStarted","Data":"1d47c760c1b7afb5cd8d1b43770673a39009fcf7a0c1d7615860552774a5087a"} Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.981131 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" event={"ID":"c4aa9ef2-3d89-409d-97a7-3990f846504a","Type":"ContainerStarted","Data":"832e70efb42a5acfb5cca515db7faff1bb5ece96d3bb62c54531b8894d1e07a8"} Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.983010 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd" event={"ID":"c9b30cf2-d2a9-4e2c-896d-60a54109e9ee","Type":"ContainerStarted","Data":"fa34096d157b06ac0ac00d651c2ad99252d4f223f46a67bee2cf594820da53ed"} Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.991312 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" event={"ID":"5991ed3f-a5bf-4586-918a-b79afd2d8c27","Type":"ContainerStarted","Data":"50c08e83a45d5e5d43aca451883ac717cb8f6a8f1c5fdfc7ec89c7f3c79f089f"} Jan 27 19:12:24 crc 
kubenswrapper[4776]: I0127 19:12:24.991828 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.993458 4776 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-dghnj container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.12:6443/healthz\": dial tcp 10.217.0.12:6443: connect: connection refused" start-of-body= Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.993508 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.12:6443/healthz\": dial tcp 10.217.0.12:6443: connect: connection refused" Jan 27 19:12:24 crc kubenswrapper[4776]: I0127 19:12:24.997729 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-vnp9r" event={"ID":"039a9234-6e8f-4854-b694-478409aacf35","Type":"ContainerStarted","Data":"97460409d76454d3cdd5b31d283cfdc57edf7e8ab6744ca17f11d6b95fa9c231"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.003109 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" event={"ID":"6e7febfa-69fa-429e-85be-9d6d54d18760","Type":"ContainerStarted","Data":"b05648f338d409859fa38a44ef5ef2a0697d7c526d9e867b0b25054bbf0e1606"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.003280 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.004798 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" event={"ID":"de3063f6-acdd-4c62-9d94-d5dd82b0fda1","Type":"ContainerStarted","Data":"cbcc0ef8b98effd190f1cfd6282905a7d247f084987bafa1f21553d988fb4a00"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.005248 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.005392 4776 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-tqlbs container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.005461 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" podUID="6e7febfa-69fa-429e-85be-9d6d54d18760" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.006296 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" event={"ID":"26eec851-c48a-47af-92a9-3fd33f385efe","Type":"ContainerStarted","Data":"a8f609a10164a91796e976abe528c11cd8e12cc1702c8d0955706dccc4ab11b5"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.007312 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" event={"ID":"245499a3-8fea-446c-8a3e-daf9df73bf15","Type":"ContainerStarted","Data":"f92e22136e4ac7e623862380f3e99463c00687eaebacb083870bfff8437adbd7"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.008681 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" event={"ID":"b05a7129-f82d-43ee-ac7e-9c2d1122a4d7","Type":"ContainerStarted","Data":"39922e2146ce43be1c3a64be27fc5c4cc5f1ba8ad61ceb9291ead5ebf23b3cd3"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.008863 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.009841 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-rkkzb" event={"ID":"e117e37f-f455-4bfc-b859-b577cc25f4d8","Type":"ContainerStarted","Data":"56219546fc5f2975d015c133db0c5c39b00df2f98d4ec905e474269893c9a952"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.011085 4776 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-gf6ml container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:5443/healthz\": dial tcp 10.217.0.42:5443: connect: connection refused" start-of-body= Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.011125 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" podUID="b05a7129-f82d-43ee-ac7e-9c2d1122a4d7" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.42:5443/healthz\": dial tcp 10.217.0.42:5443: connect: connection refused" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.011965 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" event={"ID":"dafb70f2-b919-42f3-9f40-9f6183db8369","Type":"ContainerStarted","Data":"2ba835f91a0ebea14ec38524faf66b5da777b050824e7dab422c9240894054cc"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.013229 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" event={"ID":"59235ec5-26bb-4966-a543-db238811abe6","Type":"ContainerStarted","Data":"339d776c7a865cc7a6bc539446f773cd146ed1a7378e34dafc7ee6a9f7f36259"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.013260 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" event={"ID":"59235ec5-26bb-4966-a543-db238811abe6","Type":"ContainerStarted","Data":"005e7b82d7b5ac2e599d45a87ac7bea8d06f2508f171a615e0aaf3886705c907"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.014573 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" event={"ID":"53042356-fe0e-4cd4-ba21-301a7040c408","Type":"ContainerStarted","Data":"c53516459ff84d66c439c2e54b452d1d17f91e5ba47eba0d006d6f2b2aca9f5d"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.014605 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" event={"ID":"53042356-fe0e-4cd4-ba21-301a7040c408","Type":"ContainerStarted","Data":"6957704f69a4dc83168ba1108873b2b5e31bf68c7bf7781dc09cca0adda2290a"} 
Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.015737 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-zszr6" event={"ID":"921a2ca9-0cb3-401a-a978-a70a63d9418d","Type":"ContainerStarted","Data":"f332a35e3c5491750d7c6cf8769e6ad6753fc171b671070efb733af45bec5046"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.015778 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-zszr6" event={"ID":"921a2ca9-0cb3-401a-a978-a70a63d9418d","Type":"ContainerStarted","Data":"8d62123e8c3c27c8a861d139539d8523669d44418959d8737b70bb82d9ee982c"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.016846 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" event={"ID":"a01b7ee4-02a1-4b53-81bf-7367b7e025a5","Type":"ContainerStarted","Data":"e7086fe0b90a1672df1125ea0c319c49b9793cce1adddb9ce64894cc1467fd05"} Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.017289 4776 patch_prober.go:28] interesting pod/downloads-7954f5f757-cdd4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.017357 4776 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-2qjhm container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.017289 4776 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-9d8s8 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body= Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.017410 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" podUID="52f1c1ca-0b7a-4d48-b967-1d4525e220e1" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.017417 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" podUID="8e26b09e-45b3-4bb1-a4e8-9c19a6f1da9e" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.017357 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-cdd4p" podUID="9e2f4f0c-a78b-40dc-855f-a411b72183c5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.34:8080/\": dial tcp 10.217.0.34:8080: connect: connection refused" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.017667 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-rfx55" podStartSLOduration=122.017655336 podStartE2EDuration="2m2.017655336s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.016576594 +0000 UTC m=+147.522181741" watchObservedRunningTime="2026-01-27 19:12:25.017655336 +0000 UTC m=+147.523260483" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.017709 4776 patch_prober.go:28] interesting pod/console-operator-58897d9998-894r8 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.27:8443/readyz\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body= Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.017735 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-894r8" podUID="1353bef9-e9b7-4242-afc2-1ff34d4da935" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/readyz\": dial tcp 10.217.0.27:8443: connect: connection refused" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.017996 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-n8wxr" podStartSLOduration=122.017988916 podStartE2EDuration="2m2.017988916s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:24.064900103 +0000 UTC m=+146.570505260" watchObservedRunningTime="2026-01-27 19:12:25.017988916 +0000 UTC m=+147.523594063" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.074940 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:25 crc kubenswrapper[4776]: E0127 19:12:25.077582 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.577566146 +0000 UTC m=+148.083171293 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.101100 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" podStartSLOduration=122.10108462 podStartE2EDuration="2m2.10108462s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.093067277 +0000 UTC m=+147.598672454" watchObservedRunningTime="2026-01-27 19:12:25.10108462 +0000 UTC m=+147.606689767" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.177726 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:25 crc kubenswrapper[4776]: E0127 19:12:25.178070 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.678059047 +0000 UTC m=+148.183664194 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.185683 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" podStartSLOduration=122.185667268 podStartE2EDuration="2m2.185667268s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.185420371 +0000 UTC m=+147.691025518" watchObservedRunningTime="2026-01-27 19:12:25.185667268 +0000 UTC m=+147.691272405" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.186907 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-sp9rh" podStartSLOduration=122.186900266 podStartE2EDuration="2m2.186900266s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.131430592 +0000 UTC m=+147.637035739" watchObservedRunningTime="2026-01-27 19:12:25.186900266 +0000 UTC m=+147.692505413" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.187818 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:25 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:25 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:25 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.187856 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.223609 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-zsvdf" podStartSLOduration=122.2235923 podStartE2EDuration="2m2.2235923s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.222489956 +0000 UTC m=+147.728095113" watchObservedRunningTime="2026-01-27 19:12:25.2235923 +0000 UTC m=+147.729197457" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.275811 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-bqsqh" podStartSLOduration=122.275793465 podStartE2EDuration="2m2.275793465s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-27 19:12:25.275710313 +0000 UTC m=+147.781315470" watchObservedRunningTime="2026-01-27 19:12:25.275793465 +0000 UTC m=+147.781398602" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.278515 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:25 crc kubenswrapper[4776]: E0127 19:12:25.278794 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.778763386 +0000 UTC m=+148.284368533 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.316560 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" podStartSLOduration=122.316532293 podStartE2EDuration="2m2.316532293s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.316010267 +0000 UTC m=+147.821615424" watchObservedRunningTime="2026-01-27 19:12:25.316532293 +0000 UTC m=+147.822137440" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.356466 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-bktbd" podStartSLOduration=122.356451395 podStartE2EDuration="2m2.356451395s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.353831455 +0000 UTC m=+147.859436602" watchObservedRunningTime="2026-01-27 19:12:25.356451395 +0000 UTC m=+147.862056552" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.380105 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:25 crc kubenswrapper[4776]: E0127 19:12:25.380520 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.880500395 +0000 UTC m=+148.386105542 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.426511 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-zszr6" podStartSLOduration=7.426492882 podStartE2EDuration="7.426492882s" podCreationTimestamp="2026-01-27 19:12:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.425913035 +0000 UTC m=+147.931518182" watchObservedRunningTime="2026-01-27 19:12:25.426492882 +0000 UTC m=+147.932098029" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.427939 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-7pq2b" podStartSLOduration=122.427929726 podStartE2EDuration="2m2.427929726s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.37800358 +0000 UTC m=+147.883608727" watchObservedRunningTime="2026-01-27 19:12:25.427929726 +0000 UTC m=+147.933534873" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.465717 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-rqtqp" podStartSLOduration=122.465699012 podStartE2EDuration="2m2.465699012s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.462009251 +0000 UTC m=+147.967614398" watchObservedRunningTime="2026-01-27 19:12:25.465699012 +0000 UTC m=+147.971304159" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.482369 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:25 crc kubenswrapper[4776]: E0127 19:12:25.482821 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.98275876 +0000 UTC m=+148.488363907 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.483104 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:25 crc kubenswrapper[4776]: E0127 19:12:25.483623 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:25.983601596 +0000 UTC m=+148.489206963 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.502373 4776 csr.go:261] certificate signing request csr-mxfnd is approved, waiting to be issued Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.516490 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-fkb4r" podStartSLOduration=122.516466404 podStartE2EDuration="2m2.516466404s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.514543046 +0000 UTC m=+148.020148213" watchObservedRunningTime="2026-01-27 19:12:25.516466404 +0000 UTC m=+148.022071561" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.535785 4776 csr.go:257] certificate signing request csr-mxfnd is issued Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.570076 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" podStartSLOduration=122.570050022 podStartE2EDuration="2m2.570050022s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.556973635 +0000 UTC m=+148.062578802" watchObservedRunningTime="2026-01-27 19:12:25.570050022 +0000 UTC m=+148.075655169" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.584484 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:25 crc kubenswrapper[4776]: E0127 19:12:25.584894 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.084870312 +0000 UTC m=+148.590475459 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.596205 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" podStartSLOduration=122.596177445 podStartE2EDuration="2m2.596177445s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.585225813 +0000 UTC m=+148.090830960" watchObservedRunningTime="2026-01-27 19:12:25.596177445 +0000 UTC m=+148.101782592" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.623389 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k4vpf" podStartSLOduration=122.623371321 podStartE2EDuration="2m2.623371321s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.62006271 +0000 UTC m=+148.125667887" watchObservedRunningTime="2026-01-27 19:12:25.623371321 +0000 UTC m=+148.128976478" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.623751 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.624151 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.626850 4776 patch_prober.go:28] interesting pod/apiserver-76f77b778f-9xx7d container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.626952 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" podUID="dafb70f2-b919-42f3-9f40-9f6183db8369" containerName="openshift-apiserver" probeResult="failure" output="Get \"https://10.217.0.5:8443/livez\": dial tcp 10.217.0.5:8443: connect: connection refused" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.644140 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.644200 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.662828 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" podStartSLOduration=122.662810919 podStartE2EDuration="2m2.662810919s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.661377306 +0000 UTC m=+148.166982453" watchObservedRunningTime="2026-01-27 19:12:25.662810919 +0000 UTC m=+148.168416066" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.686315 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:25 crc kubenswrapper[4776]: E0127 19:12:25.686675 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.186659453 +0000 UTC m=+148.692264590 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.741887 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-r869q" podStartSLOduration=122.74186796 podStartE2EDuration="2m2.74186796s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.706425983 +0000 UTC m=+148.212031150" watchObservedRunningTime="2026-01-27 19:12:25.74186796 +0000 UTC m=+148.247473107" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.743963 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-nvrdm" podStartSLOduration=122.743952503 podStartE2EDuration="2m2.743952503s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:25.741401265 +0000 UTC m=+148.247006412" watchObservedRunningTime="2026-01-27 19:12:25.743952503 +0000 UTC m=+148.249557650" Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.787840 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:25 
crc kubenswrapper[4776]: E0127 19:12:25.788291 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.288265638 +0000 UTC m=+148.793870795 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.890212 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:25 crc kubenswrapper[4776]: E0127 19:12:25.890642 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.390620497 +0000 UTC m=+148.896225644 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.991895 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:25 crc kubenswrapper[4776]: E0127 19:12:25.992092 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.492064757 +0000 UTC m=+148.997669904 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:25 crc kubenswrapper[4776]: I0127 19:12:25.992414 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:25 crc kubenswrapper[4776]: E0127 19:12:25.992751 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.492741138 +0000 UTC m=+148.998346345 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.017734 4776 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-p5zwn container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.017868 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" podUID="68c010e7-6718-48f9-b901-f6047901b8b2" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.023314 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r28v4" event={"ID":"986fcda6-5c06-431b-bba6-d108dbeb3d9f","Type":"ContainerStarted","Data":"38dcd3cc01997f8f7b42602244353e054f90cd7fd1a44bd3e12dd2791ad49dcb"} Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.024259 4776 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-gf6ml container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:5443/healthz\": dial tcp 10.217.0.42:5443: connect: connection refused" start-of-body= Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.024304 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" podUID="b05a7129-f82d-43ee-ac7e-9c2d1122a4d7" containerName="packageserver" 
probeResult="failure" output="Get \"https://10.217.0.42:5443/healthz\": dial tcp 10.217.0.42:5443: connect: connection refused" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.024484 4776 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-tqlbs container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.024531 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" podUID="6e7febfa-69fa-429e-85be-9d6d54d18760" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.026949 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-zszr6" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.027232 4776 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-dghnj container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.12:6443/healthz\": dial tcp 10.217.0.12:6443: connect: connection refused" start-of-body= Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.027262 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.12:6443/healthz\": dial tcp 10.217.0.12:6443: connect: connection refused" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.093535 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:26 crc kubenswrapper[4776]: E0127 19:12:26.093707 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.593679873 +0000 UTC m=+149.099285020 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.093900 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.094624 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.094689 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.094740 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.094988 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:12:26 crc kubenswrapper[4776]: E0127 19:12:26.097912 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.597901402 +0000 UTC m=+149.103506539 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.099671 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.100403 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.123640 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.124935 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.133428 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.155394 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.184023 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:26 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:26 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:26 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.184079 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.197654 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:26 crc kubenswrapper[4776]: E0127 19:12:26.198500 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.698483767 +0000 UTC m=+149.204088914 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.299469 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:26 crc kubenswrapper[4776]: E0127 19:12:26.299861 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.799848214 +0000 UTC m=+149.305453361 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.362438 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.370393 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.401178 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:26 crc kubenswrapper[4776]: E0127 19:12:26.401531 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:26.901515592 +0000 UTC m=+149.407120739 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.502909 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:26 crc kubenswrapper[4776]: E0127 19:12:26.503230 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:27.003217531 +0000 UTC m=+149.508822678 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.538831 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-27 19:07:25 +0000 UTC, rotation deadline is 2026-11-17 06:22:35.451037678 +0000 UTC Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.539223 4776 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 7043h10m8.911818864s for next certificate rotation Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.605153 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:26 crc kubenswrapper[4776]: E0127 19:12:26.605529 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:27.105510717 +0000 UTC m=+149.611115864 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.687385 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-p5zwn" Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.707101 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:26 crc kubenswrapper[4776]: E0127 19:12:26.707400 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:27.207388361 +0000 UTC m=+149.712993508 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.808529 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:26 crc kubenswrapper[4776]: E0127 19:12:26.808854 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:27.308836612 +0000 UTC m=+149.814441759 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:26 crc kubenswrapper[4776]: I0127 19:12:26.911505 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:26 crc kubenswrapper[4776]: E0127 19:12:26.911882 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:27.411867061 +0000 UTC m=+149.917472208 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.012370 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:27 crc kubenswrapper[4776]: E0127 19:12:27.012733 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:27.512718334 +0000 UTC m=+150.018323481 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.069744 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"02aebf33acc56bf629471aa4395f333ccf11c9f92af059bb6ca0aed794a92289"} Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.088054 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r28v4" event={"ID":"986fcda6-5c06-431b-bba6-d108dbeb3d9f","Type":"ContainerStarted","Data":"80396955a29dd91374e9f9246cbe6da0735979f04337689a6fbdba3ba93be0e8"} Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.109938 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-5k2fv" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.113680 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:27 crc kubenswrapper[4776]: E0127 19:12:27.114001 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:27.613989589 +0000 UTC m=+150.119594736 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.201763 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fqbt9"] Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.202744 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.204729 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.219318 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:27 crc kubenswrapper[4776]: E0127 19:12:27.220633 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:27.720618288 +0000 UTC m=+150.226223435 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.238683 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fqbt9"] Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.291448 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:27 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:27 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:27 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.291497 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.322536 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fde48346-25d2-4251-b8f7-2e333cba2144-utilities\") pod \"community-operators-fqbt9\" (UID: \"fde48346-25d2-4251-b8f7-2e333cba2144\") " pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.322964 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.322991 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27rjp\" (UniqueName: \"kubernetes.io/projected/fde48346-25d2-4251-b8f7-2e333cba2144-kube-api-access-27rjp\") pod \"community-operators-fqbt9\" (UID: \"fde48346-25d2-4251-b8f7-2e333cba2144\") " pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.323014 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fde48346-25d2-4251-b8f7-2e333cba2144-catalog-content\") pod \"community-operators-fqbt9\" (UID: \"fde48346-25d2-4251-b8f7-2e333cba2144\") " pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:12:27 crc kubenswrapper[4776]: E0127 19:12:27.323920 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:27.823908184 +0000 UTC m=+150.329513331 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.430249 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.430494 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27rjp\" (UniqueName: \"kubernetes.io/projected/fde48346-25d2-4251-b8f7-2e333cba2144-kube-api-access-27rjp\") pod \"community-operators-fqbt9\" (UID: \"fde48346-25d2-4251-b8f7-2e333cba2144\") " pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.430529 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fde48346-25d2-4251-b8f7-2e333cba2144-catalog-content\") pod \"community-operators-fqbt9\" (UID: \"fde48346-25d2-4251-b8f7-2e333cba2144\") " pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.430561 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fde48346-25d2-4251-b8f7-2e333cba2144-utilities\") pod \"community-operators-fqbt9\" (UID: \"fde48346-25d2-4251-b8f7-2e333cba2144\") " pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.430959 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fde48346-25d2-4251-b8f7-2e333cba2144-utilities\") pod \"community-operators-fqbt9\" (UID: \"fde48346-25d2-4251-b8f7-2e333cba2144\") " pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:12:27 crc kubenswrapper[4776]: E0127 19:12:27.431041 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:27.931023066 +0000 UTC m=+150.436628223 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.431461 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fde48346-25d2-4251-b8f7-2e333cba2144-catalog-content\") pod \"community-operators-fqbt9\" (UID: \"fde48346-25d2-4251-b8f7-2e333cba2144\") " pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.531335 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:27 crc kubenswrapper[4776]: E0127 19:12:27.531607 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:28.031595041 +0000 UTC m=+150.537200188 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.558752 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9m7mm"] Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.565430 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.572785 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.593062 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27rjp\" (UniqueName: \"kubernetes.io/projected/fde48346-25d2-4251-b8f7-2e333cba2144-kube-api-access-27rjp\") pod \"community-operators-fqbt9\" (UID: \"fde48346-25d2-4251-b8f7-2e333cba2144\") " pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.612844 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lhtpz"] Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.613727 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.614628 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9m7mm"] Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.618725 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lhtpz"] Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.626470 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.636282 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:27 crc kubenswrapper[4776]: E0127 19:12:27.636561 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:28.136546348 +0000 UTC m=+150.642151495 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.738555 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.738608 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-utilities\") pod \"community-operators-lhtpz\" (UID: \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\") " pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.738644 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtlv7\" (UniqueName: \"kubernetes.io/projected/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-kube-api-access-mtlv7\") pod \"certified-operators-9m7mm\" (UID: \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\") " pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.738668 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv9lz\" (UniqueName: \"kubernetes.io/projected/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-kube-api-access-wv9lz\") pod \"community-operators-lhtpz\" (UID: \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\") " 
pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.738701 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-catalog-content\") pod \"certified-operators-9m7mm\" (UID: \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\") " pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.738723 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-catalog-content\") pod \"community-operators-lhtpz\" (UID: \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\") " pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.738743 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-utilities\") pod \"certified-operators-9m7mm\" (UID: \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\") " pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:12:27 crc kubenswrapper[4776]: E0127 19:12:27.739122 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:28.239106612 +0000 UTC m=+150.744711759 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.785220 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jsvqs"] Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.796267 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.809556 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jsvqs"] Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.840552 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.840752 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-catalog-content\") pod \"certified-operators-9m7mm\" (UID: \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\") " pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.840804 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-catalog-content\") pod \"community-operators-lhtpz\" (UID: \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\") " pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.840826 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-utilities\") pod \"certified-operators-9m7mm\" (UID: \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\") " pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.840933 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-utilities\") pod \"community-operators-lhtpz\" (UID: \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\") " pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.840955 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtlv7\" (UniqueName: \"kubernetes.io/projected/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-kube-api-access-mtlv7\") pod \"certified-operators-9m7mm\" (UID: \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\") " pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.840977 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv9lz\" (UniqueName: \"kubernetes.io/projected/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-kube-api-access-wv9lz\") pod \"community-operators-lhtpz\" (UID: \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\") " pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:12:27 crc kubenswrapper[4776]: E0127 19:12:27.841379 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:28.341360648 +0000 UTC m=+150.846965795 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.841826 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-catalog-content\") pod \"certified-operators-9m7mm\" (UID: \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\") " pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.842240 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-catalog-content\") pod \"community-operators-lhtpz\" (UID: \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\") " pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.842676 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-utilities\") pod \"certified-operators-9m7mm\" (UID: \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\") " pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.845599 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-utilities\") pod \"community-operators-lhtpz\" (UID: \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\") " pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.913146 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtlv7\" (UniqueName: \"kubernetes.io/projected/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-kube-api-access-mtlv7\") pod \"certified-operators-9m7mm\" (UID: \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\") " pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.922058 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv9lz\" (UniqueName: \"kubernetes.io/projected/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-kube-api-access-wv9lz\") pod \"community-operators-lhtpz\" (UID: \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\") " pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.949147 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.950698 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkr8h\" (UniqueName: \"kubernetes.io/projected/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-kube-api-access-fkr8h\") pod \"certified-operators-jsvqs\" (UID: \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\") " pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.950821 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-utilities\") pod \"certified-operators-jsvqs\" (UID: \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\") " pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.950904 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-catalog-content\") pod \"certified-operators-jsvqs\" (UID: \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\") " pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:12:27 crc kubenswrapper[4776]: I0127 19:12:27.950975 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:27 crc kubenswrapper[4776]: E0127 19:12:27.951307 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:28.451295827 +0000 UTC m=+150.956900974 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.032756 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.054173 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.054437 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkr8h\" (UniqueName: \"kubernetes.io/projected/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-kube-api-access-fkr8h\") pod \"certified-operators-jsvqs\" (UID: \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\") " pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.054472 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-utilities\") pod \"certified-operators-jsvqs\" (UID: \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\") " pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.054490 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-catalog-content\") pod \"certified-operators-jsvqs\" (UID: \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\") " pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.054911 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-catalog-content\") pod \"certified-operators-jsvqs\" (UID: \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\") " pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:12:28 crc kubenswrapper[4776]: E0127 19:12:28.054976 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:28.554960435 +0000 UTC m=+151.060565582 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.055422 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-utilities\") pod \"certified-operators-jsvqs\" (UID: \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\") " pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.104685 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkr8h\" (UniqueName: \"kubernetes.io/projected/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-kube-api-access-fkr8h\") pod \"certified-operators-jsvqs\" (UID: \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\") " pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.137672 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"d5978d8dfdd459452fa704e194bb910e092bc90d9f6dc5620cd9181c6252a331"} Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.156270 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:28 crc kubenswrapper[4776]: E0127 19:12:28.156784 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:28.656752396 +0000 UTC m=+151.162357613 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.167795 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.183922 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:28 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:28 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:28 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.183972 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.201115 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r28v4" event={"ID":"986fcda6-5c06-431b-bba6-d108dbeb3d9f","Type":"ContainerStarted","Data":"2805118efbe3c1661d8eb4ff7d79b568f67f3deb72b3effa6d17027f31b569f3"} Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.256422 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"0cce01ba4a03fa26308ddd358472495c41869384f462bef66654f77eb1a4abec"} Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.256904 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:28 crc kubenswrapper[4776]: E0127 19:12:28.257228 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:28.757214317 +0000 UTC m=+151.262819464 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.259958 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"6f22ef92918b958a8d0efd08884512af86f1c0af688e1219b615766a46709f1b"} Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.358861 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:28 crc kubenswrapper[4776]: E0127 19:12:28.360075 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:28.86005082 +0000 UTC m=+151.365655967 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.448890 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.449447 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.451877 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.452200 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.460746 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:28 crc kubenswrapper[4776]: E0127 19:12:28.462120 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-27 19:12:28.962100019 +0000 UTC m=+151.467705166 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.511028 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.566802 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c412e05d-c92d-4c3d-9bf5-5b716bf2c64a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.566906 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.566940 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c412e05d-c92d-4c3d-9bf5-5b716bf2c64a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 19:12:28 crc kubenswrapper[4776]: E0127 19:12:28.567370 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:29.067353626 +0000 UTC m=+151.572958773 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.641766 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fqbt9"] Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.669200 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.669425 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c412e05d-c92d-4c3d-9bf5-5b716bf2c64a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.669473 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c412e05d-c92d-4c3d-9bf5-5b716bf2c64a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.669596 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c412e05d-c92d-4c3d-9bf5-5b716bf2c64a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 19:12:28 crc kubenswrapper[4776]: E0127 19:12:28.669678 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:29.169662042 +0000 UTC m=+151.675267189 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.757533 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c412e05d-c92d-4c3d-9bf5-5b716bf2c64a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.779454 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:28 crc kubenswrapper[4776]: E0127 19:12:28.780109 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:29.280095997 +0000 UTC m=+151.785701134 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.802103 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.831228 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9m7mm"] Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.881364 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:28 crc kubenswrapper[4776]: E0127 19:12:28.881684 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:29.381667621 +0000 UTC m=+151.887272768 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.960548 4776 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 27 19:12:28 crc kubenswrapper[4776]: I0127 19:12:28.983671 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:28 crc kubenswrapper[4776]: E0127 19:12:28.984072 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:29.484058041 +0000 UTC m=+151.989663188 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.015138 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jsvqs"] Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.086313 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:29 crc kubenswrapper[4776]: E0127 19:12:29.086638 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:29.586624835 +0000 UTC m=+152.092229972 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.175217 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4q55b"] Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.186812 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.188038 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:29 crc kubenswrapper[4776]: E0127 19:12:29.188563 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:29.68853342 +0000 UTC m=+152.194138567 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.188675 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:29 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:29 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:29 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.188732 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.197513 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.222123 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4q55b"] Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.264556 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lhtpz"] Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.289474 4776 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:29 crc kubenswrapper[4776]: E0127 19:12:29.289680 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:29.789642881 +0000 UTC m=+152.295248028 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.289815 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d346b77-e45d-4fb8-be3e-9b82fce112eb-utilities\") pod \"redhat-marketplace-4q55b\" (UID: \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\") " pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.289847 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkft8\" (UniqueName: \"kubernetes.io/projected/5d346b77-e45d-4fb8-be3e-9b82fce112eb-kube-api-access-xkft8\") pod \"redhat-marketplace-4q55b\" (UID: \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\") " pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.289898 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d346b77-e45d-4fb8-be3e-9b82fce112eb-catalog-content\") pod \"redhat-marketplace-4q55b\" (UID: \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\") " pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.289978 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:29 crc kubenswrapper[4776]: E0127 19:12:29.290335 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-27 19:12:29.790322381 +0000 UTC m=+152.295927528 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zh827" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.292940 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-r28v4" event={"ID":"986fcda6-5c06-431b-bba6-d108dbeb3d9f","Type":"ContainerStarted","Data":"c6ee7558a5a9b992a52cfce9dface372185cf51ca84c0108ef5d46f7ae1a20df"} Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.302533 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jsvqs" event={"ID":"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b","Type":"ContainerStarted","Data":"8d16a346a2f2ddb82bc5cec9163037441b0f9b57a5ec6fd2fdc0c935a9cf8200"} Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.310912 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"78374e9a8753dfade8dd1e4567da132a2d7a9d492587eaaca1e3fe09ba0a2ab4"} Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.311178 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.315861 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9m7mm" event={"ID":"5eedb21e-27c3-4ca4-8a86-4ad640a95f82","Type":"ContainerStarted","Data":"0a7cd1b0e960fbf055cd25f7b5d5a1d12680a3b37ec3ae3a185c3fda7f02a6e5"} Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.327942 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-r28v4" podStartSLOduration=11.327928703 podStartE2EDuration="11.327928703s" podCreationTimestamp="2026-01-27 19:12:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:29.320949821 +0000 UTC m=+151.826554988" watchObservedRunningTime="2026-01-27 19:12:29.327928703 +0000 UTC m=+151.833533850" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.343608 4776 generic.go:334] "Generic (PLEG): container finished" podID="fde48346-25d2-4251-b8f7-2e333cba2144" containerID="4f918583c7a71365666f6866107bce1d511c7c1f9fb43e0ea18868def4e779d5" exitCode=0 Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.343743 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqbt9" event={"ID":"fde48346-25d2-4251-b8f7-2e333cba2144","Type":"ContainerDied","Data":"4f918583c7a71365666f6866107bce1d511c7c1f9fb43e0ea18868def4e779d5"} Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.343791 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqbt9" event={"ID":"fde48346-25d2-4251-b8f7-2e333cba2144","Type":"ContainerStarted","Data":"6ef4413c92eb68b769e1bccb1032c33b6747fcbd64b2b6328eb2969ca54422cc"} Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.354415 4776 provider.go:102] Refreshing cache for provider: 
*credentialprovider.defaultDockerConfigProvider Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.378883 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"1c9ed57587b8c8f2e81a69edb595d0b6d4abe5c99572d0215b81a7a85449047a"} Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.396556 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.396814 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d346b77-e45d-4fb8-be3e-9b82fce112eb-catalog-content\") pod \"redhat-marketplace-4q55b\" (UID: \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\") " pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.396917 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d346b77-e45d-4fb8-be3e-9b82fce112eb-utilities\") pod \"redhat-marketplace-4q55b\" (UID: \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\") " pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.396939 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkft8\" (UniqueName: \"kubernetes.io/projected/5d346b77-e45d-4fb8-be3e-9b82fce112eb-kube-api-access-xkft8\") pod \"redhat-marketplace-4q55b\" (UID: \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\") " pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:12:29 crc kubenswrapper[4776]: E0127 19:12:29.397303 4776 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-27 19:12:29.897284099 +0000 UTC m=+152.402889256 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.402651 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d346b77-e45d-4fb8-be3e-9b82fce112eb-catalog-content\") pod \"redhat-marketplace-4q55b\" (UID: \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\") " pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.404850 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d346b77-e45d-4fb8-be3e-9b82fce112eb-utilities\") pod \"redhat-marketplace-4q55b\" (UID: \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\") " pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.424530 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkft8\" (UniqueName: \"kubernetes.io/projected/5d346b77-e45d-4fb8-be3e-9b82fce112eb-kube-api-access-xkft8\") pod \"redhat-marketplace-4q55b\" (UID: \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\") " pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.465808 4776 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-27T19:12:28.960574867Z","Handler":null,"Name":""} Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.474477 4776 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.474520 4776 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.497668 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.503912 4776 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.503945 4776 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.557413 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zh827\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.568272 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cp674"] Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.576127 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.583729 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.591066 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.596345 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cp674"] Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.601323 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.613546 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.683029 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.704697 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-utilities\") pod \"redhat-marketplace-cp674\" (UID: \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\") " pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.704790 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xd6w\" (UniqueName: \"kubernetes.io/projected/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-kube-api-access-9xd6w\") pod \"redhat-marketplace-cp674\" (UID: \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\") " pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.704962 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-catalog-content\") pod \"redhat-marketplace-cp674\" (UID: \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\") " pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.805866 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xd6w\" (UniqueName: \"kubernetes.io/projected/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-kube-api-access-9xd6w\") pod \"redhat-marketplace-cp674\" (UID: \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\") " pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.806284 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-catalog-content\") pod \"redhat-marketplace-cp674\" (UID: \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\") " pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.806343 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-utilities\") pod \"redhat-marketplace-cp674\" (UID: \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\") " pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.806866 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-utilities\") pod \"redhat-marketplace-cp674\" (UID: \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\") " pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.806945 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-catalog-content\") pod \"redhat-marketplace-cp674\" (UID: \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\") " pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.833561 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xd6w\" (UniqueName: \"kubernetes.io/projected/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-kube-api-access-9xd6w\") pod 
\"redhat-marketplace-cp674\" (UID: \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\") " pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.876684 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4q55b"] Jan 27 19:12:29 crc kubenswrapper[4776]: W0127 19:12:29.897677 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5d346b77_e45d_4fb8_be3e_9b82fce112eb.slice/crio-7bc1ef1e3a199d26d92fd9b9c433e535e9baff86634fac27d41d7ab9d405bbab WatchSource:0}: Error finding container 7bc1ef1e3a199d26d92fd9b9c433e535e9baff86634fac27d41d7ab9d405bbab: Status 404 returned error can't find the container with id 7bc1ef1e3a199d26d92fd9b9c433e535e9baff86634fac27d41d7ab9d405bbab Jan 27 19:12:29 crc kubenswrapper[4776]: I0127 19:12:29.927250 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.070241 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zh827"] Jan 27 19:12:30 crc kubenswrapper[4776]: W0127 19:12:30.096669 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podecca5f11_1043_4e40_b554_8a63ecc36423.slice/crio-54d57f282b95456b13c7af5486ab36331a80cdeed5bcd511db9c3fe669eaa9ba WatchSource:0}: Error finding container 54d57f282b95456b13c7af5486ab36331a80cdeed5bcd511db9c3fe669eaa9ba: Status 404 returned error can't find the container with id 54d57f282b95456b13c7af5486ab36331a80cdeed5bcd511db9c3fe669eaa9ba Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.180581 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:30 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:30 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:30 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.181019 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.272502 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cp674"] Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.335353 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.387330 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cp674" event={"ID":"4a2aa1a0-6ad1-4edd-a71a-987976a2f323","Type":"ContainerStarted","Data":"7961eba373b54d372f0b7a1e4ae3f4b99a9cc81864aef54aabfd1ab449456ca8"} Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.388561 4776 generic.go:334] "Generic (PLEG): container finished" podID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" 
containerID="aa6ace086f65882dd1bcafdb737f91084ff76c6783ee2d979a367ce0c6644796" exitCode=0 Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.388628 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4q55b" event={"ID":"5d346b77-e45d-4fb8-be3e-9b82fce112eb","Type":"ContainerDied","Data":"aa6ace086f65882dd1bcafdb737f91084ff76c6783ee2d979a367ce0c6644796"} Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.388653 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4q55b" event={"ID":"5d346b77-e45d-4fb8-be3e-9b82fce112eb","Type":"ContainerStarted","Data":"7bc1ef1e3a199d26d92fd9b9c433e535e9baff86634fac27d41d7ab9d405bbab"} Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.391671 4776 generic.go:334] "Generic (PLEG): container finished" podID="245499a3-8fea-446c-8a3e-daf9df73bf15" containerID="f92e22136e4ac7e623862380f3e99463c00687eaebacb083870bfff8437adbd7" exitCode=0 Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.391731 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" event={"ID":"245499a3-8fea-446c-8a3e-daf9df73bf15","Type":"ContainerDied","Data":"f92e22136e4ac7e623862380f3e99463c00687eaebacb083870bfff8437adbd7"} Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.392787 4776 generic.go:334] "Generic (PLEG): container finished" podID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" containerID="4538b12d105c1b3c50a3c3d2f5b261060c8fde384cc7624ceb6cf5bfe2d042b8" exitCode=0 Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.392830 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lhtpz" event={"ID":"15735b7d-e2cc-4603-8cc2-d284fcf18ef8","Type":"ContainerDied","Data":"4538b12d105c1b3c50a3c3d2f5b261060c8fde384cc7624ceb6cf5bfe2d042b8"} Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.392850 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lhtpz" event={"ID":"15735b7d-e2cc-4603-8cc2-d284fcf18ef8","Type":"ContainerStarted","Data":"b1b052108485a38ca5ae6a6e9e72fe69b12e352e1dc8b04c6320a00409c4ff24"} Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.435693 4776 generic.go:334] "Generic (PLEG): container finished" podID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" containerID="ab80765e55519eaf15063b3c725132c39b270aa0bc4d24fc6ab119c669f941e7" exitCode=0 Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.435786 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jsvqs" event={"ID":"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b","Type":"ContainerDied","Data":"ab80765e55519eaf15063b3c725132c39b270aa0bc4d24fc6ab119c669f941e7"} Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.449145 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a","Type":"ContainerStarted","Data":"8ff7f136b4f231bcd8dbce3b9ebec20ff6371dfd74b7c50d202d6c53fac7e2b6"} Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.449194 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a","Type":"ContainerStarted","Data":"d7b71cea2c382a0302ea3e0f1e53adda5c7b4d8f2a1c02af0958f0dfb1c84299"} Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.458631 4776 
generic.go:334] "Generic (PLEG): container finished" podID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" containerID="0691f13cc5821bee0bfc9d3f10db30c078c2b2394cb3172b02e21ab05a325196" exitCode=0 Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.459881 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9m7mm" event={"ID":"5eedb21e-27c3-4ca4-8a86-4ad640a95f82","Type":"ContainerDied","Data":"0691f13cc5821bee0bfc9d3f10db30c078c2b2394cb3172b02e21ab05a325196"} Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.467983 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zh827" event={"ID":"ecca5f11-1043-4e40-b554-8a63ecc36423","Type":"ContainerStarted","Data":"272881345a7eaeb91db523807072106aae15caedede48d1b3fd0ecd9781cc75d"} Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.468030 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zh827" event={"ID":"ecca5f11-1043-4e40-b554-8a63ecc36423","Type":"ContainerStarted","Data":"54d57f282b95456b13c7af5486ab36331a80cdeed5bcd511db9c3fe669eaa9ba"} Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.532222 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.532203696 podStartE2EDuration="2.532203696s" podCreationTimestamp="2026-01-27 19:12:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:30.527038749 +0000 UTC m=+153.032643896" watchObservedRunningTime="2026-01-27 19:12:30.532203696 +0000 UTC m=+153.037808843" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.566038 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-88bn9"] Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.567382 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.568944 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.591895 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-88bn9"] Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.605450 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-zh827" podStartSLOduration=127.605428839 podStartE2EDuration="2m7.605428839s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:30.592685762 +0000 UTC m=+153.098290919" watchObservedRunningTime="2026-01-27 19:12:30.605428839 +0000 UTC m=+153.111034006" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.627490 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.632666 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-9xx7d" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.650733 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.744461 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5807670f-4076-457a-81d3-218df5584035-utilities\") pod \"redhat-operators-88bn9\" (UID: \"5807670f-4076-457a-81d3-218df5584035\") " pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.744505 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5807670f-4076-457a-81d3-218df5584035-catalog-content\") pod \"redhat-operators-88bn9\" (UID: \"5807670f-4076-457a-81d3-218df5584035\") " pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.744575 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mqdx\" (UniqueName: \"kubernetes.io/projected/5807670f-4076-457a-81d3-218df5584035-kube-api-access-7mqdx\") pod \"redhat-operators-88bn9\" (UID: \"5807670f-4076-457a-81d3-218df5584035\") " pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.848351 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5807670f-4076-457a-81d3-218df5584035-catalog-content\") pod \"redhat-operators-88bn9\" (UID: \"5807670f-4076-457a-81d3-218df5584035\") " pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.848507 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mqdx\" (UniqueName: \"kubernetes.io/projected/5807670f-4076-457a-81d3-218df5584035-kube-api-access-7mqdx\") pod \"redhat-operators-88bn9\" (UID: 
\"5807670f-4076-457a-81d3-218df5584035\") " pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.848855 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5807670f-4076-457a-81d3-218df5584035-utilities\") pod \"redhat-operators-88bn9\" (UID: \"5807670f-4076-457a-81d3-218df5584035\") " pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.849717 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5807670f-4076-457a-81d3-218df5584035-catalog-content\") pod \"redhat-operators-88bn9\" (UID: \"5807670f-4076-457a-81d3-218df5584035\") " pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.850050 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5807670f-4076-457a-81d3-218df5584035-utilities\") pod \"redhat-operators-88bn9\" (UID: \"5807670f-4076-457a-81d3-218df5584035\") " pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.887242 4776 patch_prober.go:28] interesting pod/downloads-7954f5f757-cdd4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.887305 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-cdd4p" podUID="9e2f4f0c-a78b-40dc-855f-a411b72183c5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.34:8080/\": dial tcp 10.217.0.34:8080: connect: connection refused" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.890495 4776 patch_prober.go:28] interesting pod/downloads-7954f5f757-cdd4p container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.34:8080/\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.890690 4776 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-cdd4p" podUID="9e2f4f0c-a78b-40dc-855f-a411b72183c5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.34:8080/\": dial tcp 10.217.0.34:8080: connect: connection refused" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.893185 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mqdx\" (UniqueName: \"kubernetes.io/projected/5807670f-4076-457a-81d3-218df5584035-kube-api-access-7mqdx\") pod \"redhat-operators-88bn9\" (UID: \"5807670f-4076-457a-81d3-218df5584035\") " pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.926029 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.957663 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r76hn"] Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.959589 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:12:30 crc kubenswrapper[4776]: I0127 19:12:30.966708 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r76hn"] Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.021406 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2qjhm" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.055658 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-utilities\") pod \"redhat-operators-r76hn\" (UID: \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\") " pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.056064 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kttvr\" (UniqueName: \"kubernetes.io/projected/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-kube-api-access-kttvr\") pod \"redhat-operators-r76hn\" (UID: \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\") " pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.056145 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-catalog-content\") pod \"redhat-operators-r76hn\" (UID: \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\") " pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.077232 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-894r8" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.158999 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-catalog-content\") pod \"redhat-operators-r76hn\" (UID: \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\") " pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.159159 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-utilities\") pod \"redhat-operators-r76hn\" (UID: \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\") " pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.159226 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kttvr\" (UniqueName: \"kubernetes.io/projected/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-kube-api-access-kttvr\") pod \"redhat-operators-r76hn\" (UID: \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\") " pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.161202 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-catalog-content\") pod \"redhat-operators-r76hn\" (UID: \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\") " pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.161199 4776 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-utilities\") pod \"redhat-operators-r76hn\" (UID: \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\") " pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.180011 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.194086 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:31 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:31 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:31 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.194145 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.203721 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.203783 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.209384 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kttvr\" (UniqueName: \"kubernetes.io/projected/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-kube-api-access-kttvr\") pod \"redhat-operators-r76hn\" (UID: \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\") " pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.209873 4776 patch_prober.go:28] interesting pod/console-f9d7485db-vnp9r container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.209914 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-vnp9r" podUID="039a9234-6e8f-4854-b694-478409aacf35" containerName="console" probeResult="failure" output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.254651 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-88bn9"] Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.291099 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.413941 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.476172 4776 generic.go:334] "Generic (PLEG): container finished" podID="c412e05d-c92d-4c3d-9bf5-5b716bf2c64a" containerID="8ff7f136b4f231bcd8dbce3b9ebec20ff6371dfd74b7c50d202d6c53fac7e2b6" exitCode=0 Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.476664 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a","Type":"ContainerDied","Data":"8ff7f136b4f231bcd8dbce3b9ebec20ff6371dfd74b7c50d202d6c53fac7e2b6"} Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.493390 4776 generic.go:334] "Generic (PLEG): container finished" podID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" containerID="1eb50ce2f5eb3f6d484a7b81824679f6911746adda90dd02c24c5ff5250a429e" exitCode=0 Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.493514 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cp674" event={"ID":"4a2aa1a0-6ad1-4edd-a71a-987976a2f323","Type":"ContainerDied","Data":"1eb50ce2f5eb3f6d484a7b81824679f6911746adda90dd02c24c5ff5250a429e"} Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.523444 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88bn9" event={"ID":"5807670f-4076-457a-81d3-218df5584035","Type":"ContainerStarted","Data":"6e0e990903fbffaa4267f1fddba17b619b3d2a439d3cb79ea7f144a34fc1cf9e"} Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.524393 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.652253 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9d8s8" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.694076 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:12:31 crc kubenswrapper[4776]: I0127 19:12:31.696159 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-gf6ml" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.008267 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.066504 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r76hn"] Jan 27 19:12:32 crc kubenswrapper[4776]: W0127 19:12:32.093033 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod38b6fb13_3a52_4ed0_be81_b9a0900fb2c4.slice/crio-3a7db66d509ca9c6e7a750be6a523ee5d0d2ca7beadb5da8d4ce44f9d9cd4e78 WatchSource:0}: Error finding container 3a7db66d509ca9c6e7a750be6a523ee5d0d2ca7beadb5da8d4ce44f9d9cd4e78: Status 404 returned error can't find the container with id 3a7db66d509ca9c6e7a750be6a523ee5d0d2ca7beadb5da8d4ce44f9d9cd4e78 Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.106547 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-plkp7\" (UniqueName: \"kubernetes.io/projected/245499a3-8fea-446c-8a3e-daf9df73bf15-kube-api-access-plkp7\") pod \"245499a3-8fea-446c-8a3e-daf9df73bf15\" (UID: \"245499a3-8fea-446c-8a3e-daf9df73bf15\") " Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.106609 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/245499a3-8fea-446c-8a3e-daf9df73bf15-secret-volume\") pod \"245499a3-8fea-446c-8a3e-daf9df73bf15\" (UID: \"245499a3-8fea-446c-8a3e-daf9df73bf15\") " Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.106687 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/245499a3-8fea-446c-8a3e-daf9df73bf15-config-volume\") pod \"245499a3-8fea-446c-8a3e-daf9df73bf15\" (UID: \"245499a3-8fea-446c-8a3e-daf9df73bf15\") " Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.107796 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/245499a3-8fea-446c-8a3e-daf9df73bf15-config-volume" (OuterVolumeSpecName: "config-volume") pod "245499a3-8fea-446c-8a3e-daf9df73bf15" (UID: "245499a3-8fea-446c-8a3e-daf9df73bf15"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.117363 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/245499a3-8fea-446c-8a3e-daf9df73bf15-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "245499a3-8fea-446c-8a3e-daf9df73bf15" (UID: "245499a3-8fea-446c-8a3e-daf9df73bf15"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.121709 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/245499a3-8fea-446c-8a3e-daf9df73bf15-kube-api-access-plkp7" (OuterVolumeSpecName: "kube-api-access-plkp7") pod "245499a3-8fea-446c-8a3e-daf9df73bf15" (UID: "245499a3-8fea-446c-8a3e-daf9df73bf15"). InnerVolumeSpecName "kube-api-access-plkp7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.179598 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:32 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:32 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:32 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.179669 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.208429 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-plkp7\" (UniqueName: \"kubernetes.io/projected/245499a3-8fea-446c-8a3e-daf9df73bf15-kube-api-access-plkp7\") on node \"crc\" DevicePath \"\"" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.208467 4776 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/245499a3-8fea-446c-8a3e-daf9df73bf15-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.208476 4776 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/245499a3-8fea-446c-8a3e-daf9df73bf15-config-volume\") on node \"crc\" DevicePath \"\"" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.585905 4776 generic.go:334] "Generic (PLEG): container finished" podID="5807670f-4076-457a-81d3-218df5584035" containerID="1a52916c9346a7d8d581b1d3a77b6cc7d29a8861e8968dfa074f59d9d7c861f4" exitCode=0 Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.586593 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88bn9" event={"ID":"5807670f-4076-457a-81d3-218df5584035","Type":"ContainerDied","Data":"1a52916c9346a7d8d581b1d3a77b6cc7d29a8861e8968dfa074f59d9d7c861f4"} Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.622846 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r76hn" event={"ID":"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4","Type":"ContainerStarted","Data":"bbd00764e3cdab6c831f7071fdbe990c98c5c5529c52f400ad6eca8be2b47910"} Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.622893 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r76hn" event={"ID":"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4","Type":"ContainerStarted","Data":"3a7db66d509ca9c6e7a750be6a523ee5d0d2ca7beadb5da8d4ce44f9d9cd4e78"} Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.626255 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.626882 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29492340-zdtq6" event={"ID":"245499a3-8fea-446c-8a3e-daf9df73bf15","Type":"ContainerDied","Data":"b9ebcf237a012d96c5ceb0eba56b69ca7666854b8fddd92272eb48b8381e29ab"} Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.626901 4776 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9ebcf237a012d96c5ceb0eba56b69ca7666854b8fddd92272eb48b8381e29ab" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.736012 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 27 19:12:32 crc kubenswrapper[4776]: E0127 19:12:32.736262 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="245499a3-8fea-446c-8a3e-daf9df73bf15" containerName="collect-profiles" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.736275 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="245499a3-8fea-446c-8a3e-daf9df73bf15" containerName="collect-profiles" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.736407 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="245499a3-8fea-446c-8a3e-daf9df73bf15" containerName="collect-profiles" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.736824 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.743306 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.743442 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.750251 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.820685 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ad2e3d5d-1099-447d-9f24-e6509689c18e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"ad2e3d5d-1099-447d-9f24-e6509689c18e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.821220 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ad2e3d5d-1099-447d-9f24-e6509689c18e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"ad2e3d5d-1099-447d-9f24-e6509689c18e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.912108 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.923117 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ad2e3d5d-1099-447d-9f24-e6509689c18e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"ad2e3d5d-1099-447d-9f24-e6509689c18e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.923288 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ad2e3d5d-1099-447d-9f24-e6509689c18e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"ad2e3d5d-1099-447d-9f24-e6509689c18e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.923454 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ad2e3d5d-1099-447d-9f24-e6509689c18e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"ad2e3d5d-1099-447d-9f24-e6509689c18e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 19:12:32 crc kubenswrapper[4776]: I0127 19:12:32.948805 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ad2e3d5d-1099-447d-9f24-e6509689c18e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"ad2e3d5d-1099-447d-9f24-e6509689c18e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.024500 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c412e05d-c92d-4c3d-9bf5-5b716bf2c64a-kubelet-dir\") pod \"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a\" (UID: \"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a\") " Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.024574 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c412e05d-c92d-4c3d-9bf5-5b716bf2c64a-kube-api-access\") pod \"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a\" (UID: \"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a\") " Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.024850 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c412e05d-c92d-4c3d-9bf5-5b716bf2c64a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "c412e05d-c92d-4c3d-9bf5-5b716bf2c64a" (UID: "c412e05d-c92d-4c3d-9bf5-5b716bf2c64a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.025973 4776 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/c412e05d-c92d-4c3d-9bf5-5b716bf2c64a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.032859 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c412e05d-c92d-4c3d-9bf5-5b716bf2c64a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "c412e05d-c92d-4c3d-9bf5-5b716bf2c64a" (UID: "c412e05d-c92d-4c3d-9bf5-5b716bf2c64a"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.055845 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.127601 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c412e05d-c92d-4c3d-9bf5-5b716bf2c64a-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.184493 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:33 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:33 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:33 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.184549 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.407754 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 27 19:12:33 crc kubenswrapper[4776]: W0127 19:12:33.439744 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podad2e3d5d_1099_447d_9f24_e6509689c18e.slice/crio-a2f600d034e43091d7080cf9c0f95ce40f59c7fe82a2087cd7cc09e3ce6811ec WatchSource:0}: Error finding container a2f600d034e43091d7080cf9c0f95ce40f59c7fe82a2087cd7cc09e3ce6811ec: Status 404 returned error can't find the container with id a2f600d034e43091d7080cf9c0f95ce40f59c7fe82a2087cd7cc09e3ce6811ec Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.580836 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-d7d6j" Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.641333 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"ad2e3d5d-1099-447d-9f24-e6509689c18e","Type":"ContainerStarted","Data":"a2f600d034e43091d7080cf9c0f95ce40f59c7fe82a2087cd7cc09e3ce6811ec"} Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.645535 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"c412e05d-c92d-4c3d-9bf5-5b716bf2c64a","Type":"ContainerDied","Data":"d7b71cea2c382a0302ea3e0f1e53adda5c7b4d8f2a1c02af0958f0dfb1c84299"} Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.645568 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.645593 4776 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7b71cea2c382a0302ea3e0f1e53adda5c7b4d8f2a1c02af0958f0dfb1c84299" Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.658371 4776 generic.go:334] "Generic (PLEG): container finished" podID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" containerID="bbd00764e3cdab6c831f7071fdbe990c98c5c5529c52f400ad6eca8be2b47910" exitCode=0 Jan 27 19:12:33 crc kubenswrapper[4776]: I0127 19:12:33.658431 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r76hn" event={"ID":"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4","Type":"ContainerDied","Data":"bbd00764e3cdab6c831f7071fdbe990c98c5c5529c52f400ad6eca8be2b47910"} Jan 27 19:12:34 crc kubenswrapper[4776]: I0127 19:12:34.181601 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:34 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:34 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:34 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:34 crc kubenswrapper[4776]: I0127 19:12:34.181664 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:34 crc kubenswrapper[4776]: I0127 19:12:34.670162 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"ad2e3d5d-1099-447d-9f24-e6509689c18e","Type":"ContainerStarted","Data":"e237e42bf822abf45264ffb1c7e82328d9c2eec935297f3abc2b06bda8a165e9"} Jan 27 19:12:35 crc kubenswrapper[4776]: I0127 19:12:35.180349 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:35 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:35 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:35 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:35 crc kubenswrapper[4776]: I0127 19:12:35.180431 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:35 crc kubenswrapper[4776]: I0127 19:12:35.711283 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.711268486 podStartE2EDuration="3.711268486s" podCreationTimestamp="2026-01-27 19:12:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:12:35.704667295 +0000 UTC m=+158.210272442" watchObservedRunningTime="2026-01-27 19:12:35.711268486 +0000 UTC m=+158.216873633" Jan 27 19:12:36 crc kubenswrapper[4776]: I0127 19:12:36.180268 4776 patch_prober.go:28] interesting 
pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:36 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:36 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:36 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:36 crc kubenswrapper[4776]: I0127 19:12:36.180317 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:36 crc kubenswrapper[4776]: I0127 19:12:36.747712 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-zszr6" Jan 27 19:12:36 crc kubenswrapper[4776]: I0127 19:12:36.776021 4776 generic.go:334] "Generic (PLEG): container finished" podID="ad2e3d5d-1099-447d-9f24-e6509689c18e" containerID="e237e42bf822abf45264ffb1c7e82328d9c2eec935297f3abc2b06bda8a165e9" exitCode=0 Jan 27 19:12:36 crc kubenswrapper[4776]: I0127 19:12:36.776072 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"ad2e3d5d-1099-447d-9f24-e6509689c18e","Type":"ContainerDied","Data":"e237e42bf822abf45264ffb1c7e82328d9c2eec935297f3abc2b06bda8a165e9"} Jan 27 19:12:37 crc kubenswrapper[4776]: I0127 19:12:37.181712 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:37 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:37 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:37 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:37 crc kubenswrapper[4776]: I0127 19:12:37.182023 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:38 crc kubenswrapper[4776]: I0127 19:12:38.179186 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:38 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:38 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:38 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:38 crc kubenswrapper[4776]: I0127 19:12:38.179253 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:38 crc kubenswrapper[4776]: I0127 19:12:38.298908 4776 patch_prober.go:28] interesting pod/machine-config-daemon-z2w9t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 19:12:38 crc kubenswrapper[4776]: I0127 
19:12:38.298970 4776 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" podUID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 19:12:39 crc kubenswrapper[4776]: I0127 19:12:39.178906 4776 patch_prober.go:28] interesting pod/router-default-5444994796-b9hnh container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 27 19:12:39 crc kubenswrapper[4776]: [-]has-synced failed: reason withheld Jan 27 19:12:39 crc kubenswrapper[4776]: [+]process-running ok Jan 27 19:12:39 crc kubenswrapper[4776]: healthz check failed Jan 27 19:12:39 crc kubenswrapper[4776]: I0127 19:12:39.178981 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b9hnh" podUID="fd5f8cc1-9cd6-4d77-b2c4-801e5d60945a" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 27 19:12:40 crc kubenswrapper[4776]: I0127 19:12:40.179627 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:40 crc kubenswrapper[4776]: I0127 19:12:40.181888 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-b9hnh" Jan 27 19:12:40 crc kubenswrapper[4776]: I0127 19:12:40.886932 4776 patch_prober.go:28] interesting pod/downloads-7954f5f757-cdd4p container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.34:8080/\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Jan 27 19:12:40 crc kubenswrapper[4776]: I0127 19:12:40.886982 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-cdd4p" podUID="9e2f4f0c-a78b-40dc-855f-a411b72183c5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.34:8080/\": dial tcp 10.217.0.34:8080: connect: connection refused" Jan 27 19:12:40 crc kubenswrapper[4776]: I0127 19:12:40.886997 4776 patch_prober.go:28] interesting pod/downloads-7954f5f757-cdd4p container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.34:8080/\": dial tcp 10.217.0.34:8080: connect: connection refused" start-of-body= Jan 27 19:12:40 crc kubenswrapper[4776]: I0127 19:12:40.887050 4776 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-cdd4p" podUID="9e2f4f0c-a78b-40dc-855f-a411b72183c5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.34:8080/\": dial tcp 10.217.0.34:8080: connect: connection refused" Jan 27 19:12:41 crc kubenswrapper[4776]: I0127 19:12:41.199220 4776 patch_prober.go:28] interesting pod/console-f9d7485db-vnp9r container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Jan 27 19:12:41 crc kubenswrapper[4776]: I0127 19:12:41.199273 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-vnp9r" podUID="039a9234-6e8f-4854-b694-478409aacf35" containerName="console" probeResult="failure" output="Get 
\"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" Jan 27 19:12:46 crc kubenswrapper[4776]: I0127 19:12:46.060613 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs\") pod \"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:12:46 crc kubenswrapper[4776]: I0127 19:12:46.066783 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/28a63e45-994f-41fe-82fb-605486d13b98-metrics-certs\") pod \"network-metrics-daemon-t7tbg\" (UID: \"28a63e45-994f-41fe-82fb-605486d13b98\") " pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:12:46 crc kubenswrapper[4776]: I0127 19:12:46.144223 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-t7tbg" Jan 27 19:12:48 crc kubenswrapper[4776]: I0127 19:12:48.449412 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rnpjf"] Jan 27 19:12:48 crc kubenswrapper[4776]: I0127 19:12:48.449658 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" podUID="c944923b-00f1-4f4f-a883-aeb69d86c343" containerName="controller-manager" containerID="cri-o://11c42742ebf12903135d59fb08c2eca789adf6bdabb2b3e2e3b977d6c408604e" gracePeriod=30 Jan 27 19:12:48 crc kubenswrapper[4776]: I0127 19:12:48.486228 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m"] Jan 27 19:12:48 crc kubenswrapper[4776]: I0127 19:12:48.487219 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" podUID="dfecd87d-0e25-4116-ae11-72e607a011dd" containerName="route-controller-manager" containerID="cri-o://b161bf76f517d8d602a77e4f9e96d22a4813848ec37b62c0413aaea67aa540ea" gracePeriod=30 Jan 27 19:12:49 crc kubenswrapper[4776]: I0127 19:12:49.693292 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:12:49 crc kubenswrapper[4776]: I0127 19:12:49.869580 4776 generic.go:334] "Generic (PLEG): container finished" podID="c944923b-00f1-4f4f-a883-aeb69d86c343" containerID="11c42742ebf12903135d59fb08c2eca789adf6bdabb2b3e2e3b977d6c408604e" exitCode=0 Jan 27 19:12:49 crc kubenswrapper[4776]: I0127 19:12:49.869639 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" event={"ID":"c944923b-00f1-4f4f-a883-aeb69d86c343","Type":"ContainerDied","Data":"11c42742ebf12903135d59fb08c2eca789adf6bdabb2b3e2e3b977d6c408604e"} Jan 27 19:12:50 crc kubenswrapper[4776]: I0127 19:12:50.611719 4776 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-cxv6m container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Jan 27 19:12:50 crc kubenswrapper[4776]: I0127 19:12:50.612064 4776 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" podUID="dfecd87d-0e25-4116-ae11-72e607a011dd" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Jan 27 19:12:50 crc kubenswrapper[4776]: I0127 19:12:50.644452 4776 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-rnpjf container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Jan 27 19:12:50 crc kubenswrapper[4776]: I0127 19:12:50.644758 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" podUID="c944923b-00f1-4f4f-a883-aeb69d86c343" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 27 19:12:50 crc kubenswrapper[4776]: I0127 19:12:50.905629 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-cdd4p" Jan 27 19:12:51 crc kubenswrapper[4776]: I0127 19:12:51.203348 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:51 crc kubenswrapper[4776]: I0127 19:12:51.207605 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-vnp9r" Jan 27 19:12:58 crc kubenswrapper[4776]: I0127 19:12:58.932922 4776 generic.go:334] "Generic (PLEG): container finished" podID="dfecd87d-0e25-4116-ae11-72e607a011dd" containerID="b161bf76f517d8d602a77e4f9e96d22a4813848ec37b62c0413aaea67aa540ea" exitCode=0 Jan 27 19:12:58 crc kubenswrapper[4776]: I0127 19:12:58.932980 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" event={"ID":"dfecd87d-0e25-4116-ae11-72e607a011dd","Type":"ContainerDied","Data":"b161bf76f517d8d602a77e4f9e96d22a4813848ec37b62c0413aaea67aa540ea"} Jan 27 19:13:00 crc kubenswrapper[4776]: I0127 19:13:00.611090 4776 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-cxv6m container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Jan 27 19:13:00 crc kubenswrapper[4776]: I0127 19:13:00.611442 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" podUID="dfecd87d-0e25-4116-ae11-72e607a011dd" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Jan 27 19:13:01 crc kubenswrapper[4776]: I0127 19:13:01.019375 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-m657q" Jan 27 19:13:01 crc kubenswrapper[4776]: I0127 19:13:01.644216 4776 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-rnpjf container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while 
waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 27 19:13:01 crc kubenswrapper[4776]: I0127 19:13:01.644309 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" podUID="c944923b-00f1-4f4f-a883-aeb69d86c343" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.574699 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.578480 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ad2e3d5d-1099-447d-9f24-e6509689c18e-kube-api-access\") pod \"ad2e3d5d-1099-447d-9f24-e6509689c18e\" (UID: \"ad2e3d5d-1099-447d-9f24-e6509689c18e\") " Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.581995 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.589839 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad2e3d5d-1099-447d-9f24-e6509689c18e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ad2e3d5d-1099-447d-9f24-e6509689c18e" (UID: "ad2e3d5d-1099-447d-9f24-e6509689c18e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:04 crc kubenswrapper[4776]: E0127 19:13:04.621449 4776 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 27 19:13:04 crc kubenswrapper[4776]: E0127 19:13:04.621699 4776 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wv9lz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-lhtpz_openshift-marketplace(15735b7d-e2cc-4603-8cc2-d284fcf18ef8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 27 19:13:04 crc kubenswrapper[4776]: E0127 19:13:04.622963 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-lhtpz" podUID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" Jan 27 19:13:04 crc kubenswrapper[4776]: E0127 19:13:04.661076 4776 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 27 19:13:04 crc kubenswrapper[4776]: E0127 19:13:04.661310 4776 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-27rjp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-fqbt9_openshift-marketplace(fde48346-25d2-4251-b8f7-2e333cba2144): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 27 19:13:04 crc kubenswrapper[4776]: E0127 19:13:04.662536 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-fqbt9" podUID="fde48346-25d2-4251-b8f7-2e333cba2144" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.679884 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ad2e3d5d-1099-447d-9f24-e6509689c18e-kubelet-dir\") pod \"ad2e3d5d-1099-447d-9f24-e6509689c18e\" (UID: \"ad2e3d5d-1099-447d-9f24-e6509689c18e\") " Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.679962 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-client-ca\") pod \"c944923b-00f1-4f4f-a883-aeb69d86c343\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.679999 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c944923b-00f1-4f4f-a883-aeb69d86c343-serving-cert\") pod \"c944923b-00f1-4f4f-a883-aeb69d86c343\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.680025 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr4ws\" (UniqueName: \"kubernetes.io/projected/c944923b-00f1-4f4f-a883-aeb69d86c343-kube-api-access-sr4ws\") pod \"c944923b-00f1-4f4f-a883-aeb69d86c343\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.680019 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/ad2e3d5d-1099-447d-9f24-e6509689c18e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ad2e3d5d-1099-447d-9f24-e6509689c18e" (UID: "ad2e3d5d-1099-447d-9f24-e6509689c18e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.680064 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-config\") pod \"c944923b-00f1-4f4f-a883-aeb69d86c343\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.680103 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-proxy-ca-bundles\") pod \"c944923b-00f1-4f4f-a883-aeb69d86c343\" (UID: \"c944923b-00f1-4f4f-a883-aeb69d86c343\") " Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.680271 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ad2e3d5d-1099-447d-9f24-e6509689c18e-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.680284 4776 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ad2e3d5d-1099-447d-9f24-e6509689c18e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.681432 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c944923b-00f1-4f4f-a883-aeb69d86c343" (UID: "c944923b-00f1-4f4f-a883-aeb69d86c343"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.681487 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-config" (OuterVolumeSpecName: "config") pod "c944923b-00f1-4f4f-a883-aeb69d86c343" (UID: "c944923b-00f1-4f4f-a883-aeb69d86c343"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.681658 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-client-ca" (OuterVolumeSpecName: "client-ca") pod "c944923b-00f1-4f4f-a883-aeb69d86c343" (UID: "c944923b-00f1-4f4f-a883-aeb69d86c343"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.683682 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c944923b-00f1-4f4f-a883-aeb69d86c343-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c944923b-00f1-4f4f-a883-aeb69d86c343" (UID: "c944923b-00f1-4f4f-a883-aeb69d86c343"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.688801 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c944923b-00f1-4f4f-a883-aeb69d86c343-kube-api-access-sr4ws" (OuterVolumeSpecName: "kube-api-access-sr4ws") pod "c944923b-00f1-4f4f-a883-aeb69d86c343" (UID: "c944923b-00f1-4f4f-a883-aeb69d86c343"). InnerVolumeSpecName "kube-api-access-sr4ws". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.781983 4776 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.782039 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c944923b-00f1-4f4f-a883-aeb69d86c343-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.782055 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr4ws\" (UniqueName: \"kubernetes.io/projected/c944923b-00f1-4f4f-a883-aeb69d86c343-kube-api-access-sr4ws\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.782118 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.782133 4776 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c944923b-00f1-4f4f-a883-aeb69d86c343-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.964866 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"ad2e3d5d-1099-447d-9f24-e6509689c18e","Type":"ContainerDied","Data":"a2f600d034e43091d7080cf9c0f95ce40f59c7fe82a2087cd7cc09e3ce6811ec"} Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.964899 4776 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2f600d034e43091d7080cf9c0f95ce40f59c7fe82a2087cd7cc09e3ce6811ec" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.964947 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.970366 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.976968 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-rnpjf" event={"ID":"c944923b-00f1-4f4f-a883-aeb69d86c343","Type":"ContainerDied","Data":"0aeacce88ca5edd1e070c5c921d68051a6ea8689d0391f56a2e7e4bf054f17c2"} Jan 27 19:13:04 crc kubenswrapper[4776]: I0127 19:13:04.977102 4776 scope.go:117] "RemoveContainer" containerID="11c42742ebf12903135d59fb08c2eca789adf6bdabb2b3e2e3b977d6c408604e" Jan 27 19:13:05 crc kubenswrapper[4776]: I0127 19:13:05.035381 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rnpjf"] Jan 27 19:13:05 crc kubenswrapper[4776]: I0127 19:13:05.037701 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-rnpjf"] Jan 27 19:13:06 crc kubenswrapper[4776]: E0127 19:13:06.070360 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-fqbt9" podUID="fde48346-25d2-4251-b8f7-2e333cba2144" Jan 27 19:13:06 crc kubenswrapper[4776]: E0127 19:13:06.070360 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-lhtpz" podUID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" Jan 27 19:13:06 crc kubenswrapper[4776]: E0127 19:13:06.149698 4776 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 27 19:13:06 crc kubenswrapper[4776]: E0127 19:13:06.150120 4776 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fkr8h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-jsvqs_openshift-marketplace(a87f66a9-fc0e-43d0-a4da-7e205ba8f81b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 27 19:13:06 crc kubenswrapper[4776]: E0127 19:13:06.151976 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-jsvqs" podUID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" Jan 27 19:13:06 crc kubenswrapper[4776]: I0127 19:13:06.325873 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c944923b-00f1-4f4f-a883-aeb69d86c343" path="/var/lib/kubelet/pods/c944923b-00f1-4f4f-a883-aeb69d86c343/volumes" Jan 27 19:13:06 crc kubenswrapper[4776]: I0127 19:13:06.377725 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.088326 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7cc78794d4-tpqvt"] Jan 27 19:13:07 crc kubenswrapper[4776]: E0127 19:13:07.088597 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c412e05d-c92d-4c3d-9bf5-5b716bf2c64a" containerName="pruner" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.088609 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="c412e05d-c92d-4c3d-9bf5-5b716bf2c64a" containerName="pruner" Jan 27 19:13:07 crc kubenswrapper[4776]: E0127 19:13:07.088622 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c944923b-00f1-4f4f-a883-aeb69d86c343" containerName="controller-manager" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.088627 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="c944923b-00f1-4f4f-a883-aeb69d86c343" containerName="controller-manager" Jan 27 19:13:07 crc kubenswrapper[4776]: E0127 19:13:07.088640 4776 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ad2e3d5d-1099-447d-9f24-e6509689c18e" containerName="pruner" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.088647 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad2e3d5d-1099-447d-9f24-e6509689c18e" containerName="pruner" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.088738 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="c412e05d-c92d-4c3d-9bf5-5b716bf2c64a" containerName="pruner" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.088752 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="c944923b-00f1-4f4f-a883-aeb69d86c343" containerName="controller-manager" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.088759 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad2e3d5d-1099-447d-9f24-e6509689c18e" containerName="pruner" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.089158 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.092371 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.092421 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.093656 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.094856 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.095034 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.095436 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.099530 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7cc78794d4-tpqvt"] Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.099683 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.110617 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-proxy-ca-bundles\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.110907 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-config\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.111057 4776 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-client-ca\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.111179 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32ac2d9e-9651-4810-b7bd-fa945cef9b74-serving-cert\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.111308 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz85w\" (UniqueName: \"kubernetes.io/projected/32ac2d9e-9651-4810-b7bd-fa945cef9b74-kube-api-access-pz85w\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.214844 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz85w\" (UniqueName: \"kubernetes.io/projected/32ac2d9e-9651-4810-b7bd-fa945cef9b74-kube-api-access-pz85w\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.214934 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-proxy-ca-bundles\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.214977 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-config\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.215014 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-client-ca\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.215035 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32ac2d9e-9651-4810-b7bd-fa945cef9b74-serving-cert\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.215947 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-client-ca\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.216247 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-proxy-ca-bundles\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.216406 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-config\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.225440 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32ac2d9e-9651-4810-b7bd-fa945cef9b74-serving-cert\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.242035 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz85w\" (UniqueName: \"kubernetes.io/projected/32ac2d9e-9651-4810-b7bd-fa945cef9b74-kube-api-access-pz85w\") pod \"controller-manager-7cc78794d4-tpqvt\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:07 crc kubenswrapper[4776]: I0127 19:13:07.420076 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:08 crc kubenswrapper[4776]: I0127 19:13:08.126710 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7cc78794d4-tpqvt"] Jan 27 19:13:08 crc kubenswrapper[4776]: I0127 19:13:08.299215 4776 patch_prober.go:28] interesting pod/machine-config-daemon-z2w9t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 19:13:08 crc kubenswrapper[4776]: I0127 19:13:08.299283 4776 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" podUID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 19:13:09 crc kubenswrapper[4776]: E0127 19:13:09.169067 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-jsvqs" podUID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" Jan 27 19:13:09 crc kubenswrapper[4776]: E0127 19:13:09.256780 4776 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 27 19:13:09 crc kubenswrapper[4776]: E0127 19:13:09.257257 4776 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7mqdx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-88bn9_openshift-marketplace(5807670f-4076-457a-81d3-218df5584035): ErrImagePull: rpc error: code = Canceled desc = copying system 
image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 27 19:13:09 crc kubenswrapper[4776]: E0127 19:13:09.258419 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-88bn9" podUID="5807670f-4076-457a-81d3-218df5584035" Jan 27 19:13:09 crc kubenswrapper[4776]: I0127 19:13:09.329757 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 27 19:13:09 crc kubenswrapper[4776]: I0127 19:13:09.330612 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 19:13:09 crc kubenswrapper[4776]: I0127 19:13:09.333499 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 27 19:13:09 crc kubenswrapper[4776]: I0127 19:13:09.333694 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 27 19:13:09 crc kubenswrapper[4776]: I0127 19:13:09.337953 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 27 19:13:09 crc kubenswrapper[4776]: I0127 19:13:09.342915 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/89742130-11e4-4222-ac4a-e8fabb586600-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"89742130-11e4-4222-ac4a-e8fabb586600\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 19:13:09 crc kubenswrapper[4776]: I0127 19:13:09.342973 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/89742130-11e4-4222-ac4a-e8fabb586600-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"89742130-11e4-4222-ac4a-e8fabb586600\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 19:13:09 crc kubenswrapper[4776]: I0127 19:13:09.443963 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/89742130-11e4-4222-ac4a-e8fabb586600-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"89742130-11e4-4222-ac4a-e8fabb586600\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 19:13:09 crc kubenswrapper[4776]: I0127 19:13:09.444114 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/89742130-11e4-4222-ac4a-e8fabb586600-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"89742130-11e4-4222-ac4a-e8fabb586600\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 19:13:09 crc kubenswrapper[4776]: I0127 19:13:09.444247 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/89742130-11e4-4222-ac4a-e8fabb586600-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"89742130-11e4-4222-ac4a-e8fabb586600\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 19:13:09 crc kubenswrapper[4776]: I0127 19:13:09.468066 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/89742130-11e4-4222-ac4a-e8fabb586600-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"89742130-11e4-4222-ac4a-e8fabb586600\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 19:13:09 crc kubenswrapper[4776]: I0127 19:13:09.658193 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.563336 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-88bn9" podUID="5807670f-4076-457a-81d3-218df5584035" Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.614193 4776 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.614349 4776 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kttvr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-r76hn_openshift-marketplace(38b6fb13-3a52-4ed0-be81-b9a0900fb2c4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.615676 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-r76hn" podUID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.648655 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.659342 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dfecd87d-0e25-4116-ae11-72e607a011dd-client-ca\") pod \"dfecd87d-0e25-4116-ae11-72e607a011dd\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.659524 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfecd87d-0e25-4116-ae11-72e607a011dd-config\") pod \"dfecd87d-0e25-4116-ae11-72e607a011dd\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.659562 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmwm9\" (UniqueName: \"kubernetes.io/projected/dfecd87d-0e25-4116-ae11-72e607a011dd-kube-api-access-pmwm9\") pod \"dfecd87d-0e25-4116-ae11-72e607a011dd\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.659596 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dfecd87d-0e25-4116-ae11-72e607a011dd-serving-cert\") pod \"dfecd87d-0e25-4116-ae11-72e607a011dd\" (UID: \"dfecd87d-0e25-4116-ae11-72e607a011dd\") " Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.661101 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfecd87d-0e25-4116-ae11-72e607a011dd-config" (OuterVolumeSpecName: "config") pod "dfecd87d-0e25-4116-ae11-72e607a011dd" (UID: "dfecd87d-0e25-4116-ae11-72e607a011dd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.661639 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfecd87d-0e25-4116-ae11-72e607a011dd-client-ca" (OuterVolumeSpecName: "client-ca") pod "dfecd87d-0e25-4116-ae11-72e607a011dd" (UID: "dfecd87d-0e25-4116-ae11-72e607a011dd"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.666467 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfecd87d-0e25-4116-ae11-72e607a011dd-kube-api-access-pmwm9" (OuterVolumeSpecName: "kube-api-access-pmwm9") pod "dfecd87d-0e25-4116-ae11-72e607a011dd" (UID: "dfecd87d-0e25-4116-ae11-72e607a011dd"). InnerVolumeSpecName "kube-api-access-pmwm9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.666905 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfecd87d-0e25-4116-ae11-72e607a011dd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "dfecd87d-0e25-4116-ae11-72e607a011dd" (UID: "dfecd87d-0e25-4116-ae11-72e607a011dd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.770230 4776 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.770344 4776 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mtlv7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-9m7mm_openshift-marketplace(5eedb21e-27c3-4ca4-8a86-4ad640a95f82): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.770529 4776 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.770578 4776 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xkft8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-4q55b_openshift-marketplace(5d346b77-e45d-4fb8-be3e-9b82fce112eb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.770836 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dfecd87d-0e25-4116-ae11-72e607a011dd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.770870 4776 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dfecd87d-0e25-4116-ae11-72e607a011dd-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.770881 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dfecd87d-0e25-4116-ae11-72e607a011dd-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.770893 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmwm9\" (UniqueName: \"kubernetes.io/projected/dfecd87d-0e25-4116-ae11-72e607a011dd-kube-api-access-pmwm9\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.772342 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-4q55b" podUID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.772396 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-9m7mm" podUID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.781861 4776 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk"] Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.782084 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfecd87d-0e25-4116-ae11-72e607a011dd" containerName="route-controller-manager" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.782096 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfecd87d-0e25-4116-ae11-72e607a011dd" containerName="route-controller-manager" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.782194 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfecd87d-0e25-4116-ae11-72e607a011dd" containerName="route-controller-manager" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.782525 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.784150 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk"] Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.855900 4776 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.856122 4776 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9xd6w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-cp674_openshift-marketplace(4a2aa1a0-6ad1-4edd-a71a-987976a2f323): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 27 19:13:10 crc kubenswrapper[4776]: E0127 19:13:10.857633 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: 
\"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-cp674" podUID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.872707 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-config\") pod \"route-controller-manager-5c6d6df84c-sqgzk\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.872752 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-client-ca\") pod \"route-controller-manager-5c6d6df84c-sqgzk\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.872944 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-serving-cert\") pod \"route-controller-manager-5c6d6df84c-sqgzk\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.872982 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgc77\" (UniqueName: \"kubernetes.io/projected/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-kube-api-access-zgc77\") pod \"route-controller-manager-5c6d6df84c-sqgzk\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.923881 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 27 19:13:10 crc kubenswrapper[4776]: W0127 19:13:10.934661 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod89742130_11e4_4222_ac4a_e8fabb586600.slice/crio-410ea11ee5d92a679deab2eb62433a85e1c8bdb529d0ccdc9850332a26763abc WatchSource:0}: Error finding container 410ea11ee5d92a679deab2eb62433a85e1c8bdb529d0ccdc9850332a26763abc: Status 404 returned error can't find the container with id 410ea11ee5d92a679deab2eb62433a85e1c8bdb529d0ccdc9850332a26763abc Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.973621 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-serving-cert\") pod \"route-controller-manager-5c6d6df84c-sqgzk\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.973942 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgc77\" (UniqueName: \"kubernetes.io/projected/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-kube-api-access-zgc77\") pod \"route-controller-manager-5c6d6df84c-sqgzk\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " 
pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.973998 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-config\") pod \"route-controller-manager-5c6d6df84c-sqgzk\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.974027 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-client-ca\") pod \"route-controller-manager-5c6d6df84c-sqgzk\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.975355 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-config\") pod \"route-controller-manager-5c6d6df84c-sqgzk\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.976379 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-client-ca\") pod \"route-controller-manager-5c6d6df84c-sqgzk\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.977240 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-serving-cert\") pod \"route-controller-manager-5c6d6df84c-sqgzk\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:10 crc kubenswrapper[4776]: I0127 19:13:10.992105 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgc77\" (UniqueName: \"kubernetes.io/projected/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-kube-api-access-zgc77\") pod \"route-controller-manager-5c6d6df84c-sqgzk\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:11 crc kubenswrapper[4776]: I0127 19:13:10.999970 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"89742130-11e4-4222-ac4a-e8fabb586600","Type":"ContainerStarted","Data":"410ea11ee5d92a679deab2eb62433a85e1c8bdb529d0ccdc9850332a26763abc"} Jan 27 19:13:11 crc kubenswrapper[4776]: I0127 19:13:11.002531 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" Jan 27 19:13:11 crc kubenswrapper[4776]: I0127 19:13:11.003943 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" event={"ID":"dfecd87d-0e25-4116-ae11-72e607a011dd","Type":"ContainerDied","Data":"11c72072ddd9687b71c60c27ad975f4cd711fc7725e05d087513bcf82a869eb4"} Jan 27 19:13:11 crc kubenswrapper[4776]: I0127 19:13:11.004013 4776 scope.go:117] "RemoveContainer" containerID="b161bf76f517d8d602a77e4f9e96d22a4813848ec37b62c0413aaea67aa540ea" Jan 27 19:13:11 crc kubenswrapper[4776]: E0127 19:13:11.005332 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-r76hn" podUID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" Jan 27 19:13:11 crc kubenswrapper[4776]: E0127 19:13:11.006088 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-cp674" podUID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" Jan 27 19:13:11 crc kubenswrapper[4776]: E0127 19:13:11.006427 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-9m7mm" podUID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" Jan 27 19:13:11 crc kubenswrapper[4776]: E0127 19:13:11.007272 4776 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-4q55b" podUID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" Jan 27 19:13:11 crc kubenswrapper[4776]: I0127 19:13:11.079284 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m"] Jan 27 19:13:11 crc kubenswrapper[4776]: I0127 19:13:11.082434 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m"] Jan 27 19:13:11 crc kubenswrapper[4776]: I0127 19:13:11.126544 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:11 crc kubenswrapper[4776]: I0127 19:13:11.154094 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-t7tbg"] Jan 27 19:13:11 crc kubenswrapper[4776]: W0127 19:13:11.162449 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28a63e45_994f_41fe_82fb_605486d13b98.slice/crio-ea3485feeb963399de09ee14fff7b6aff64b6b7c596e624edf842dc10f75a753 WatchSource:0}: Error finding container ea3485feeb963399de09ee14fff7b6aff64b6b7c596e624edf842dc10f75a753: Status 404 returned error can't find the container with id ea3485feeb963399de09ee14fff7b6aff64b6b7c596e624edf842dc10f75a753 Jan 27 19:13:11 crc kubenswrapper[4776]: I0127 19:13:11.171524 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7cc78794d4-tpqvt"] Jan 27 19:13:11 crc kubenswrapper[4776]: I0127 19:13:11.341151 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk"] Jan 27 19:13:11 crc kubenswrapper[4776]: W0127 19:13:11.358676 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f12ec4b_50ad_4a8c_abcb_629947b04b9c.slice/crio-8b7ba160a76537ea39dbdd976dcaeba3e950ff0497ccc79873c7ae7fcd86fbed WatchSource:0}: Error finding container 8b7ba160a76537ea39dbdd976dcaeba3e950ff0497ccc79873c7ae7fcd86fbed: Status 404 returned error can't find the container with id 8b7ba160a76537ea39dbdd976dcaeba3e950ff0497ccc79873c7ae7fcd86fbed Jan 27 19:13:11 crc kubenswrapper[4776]: I0127 19:13:11.614200 4776 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-cxv6m container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 27 19:13:11 crc kubenswrapper[4776]: I0127 19:13:11.614527 4776 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-cxv6m" podUID="dfecd87d-0e25-4116-ae11-72e607a011dd" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.008385 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" event={"ID":"28a63e45-994f-41fe-82fb-605486d13b98","Type":"ContainerStarted","Data":"3a20ee1bf823abdd2a40c782eba744bdb2b75156b5309536f77ca6d5996f4dd5"} Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.008424 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" event={"ID":"28a63e45-994f-41fe-82fb-605486d13b98","Type":"ContainerStarted","Data":"5b2e2d6c53bd4fb1ea36207150e064b66c0a68d85b81064ce0afd4dfbbe51a84"} Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.008433 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-t7tbg" 
event={"ID":"28a63e45-994f-41fe-82fb-605486d13b98","Type":"ContainerStarted","Data":"ea3485feeb963399de09ee14fff7b6aff64b6b7c596e624edf842dc10f75a753"} Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.010269 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" event={"ID":"5f12ec4b-50ad-4a8c-abcb-629947b04b9c","Type":"ContainerStarted","Data":"ab992a288d1af84f5047c601e754b664e4bac67efb6d32ae675deb718a400688"} Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.010294 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" event={"ID":"5f12ec4b-50ad-4a8c-abcb-629947b04b9c","Type":"ContainerStarted","Data":"8b7ba160a76537ea39dbdd976dcaeba3e950ff0497ccc79873c7ae7fcd86fbed"} Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.010697 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.012587 4776 generic.go:334] "Generic (PLEG): container finished" podID="89742130-11e4-4222-ac4a-e8fabb586600" containerID="7bb578669fb182aed1f2ac4820d589951041999c9963039298a534be33970925" exitCode=0 Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.012669 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"89742130-11e4-4222-ac4a-e8fabb586600","Type":"ContainerDied","Data":"7bb578669fb182aed1f2ac4820d589951041999c9963039298a534be33970925"} Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.014067 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" event={"ID":"32ac2d9e-9651-4810-b7bd-fa945cef9b74","Type":"ContainerStarted","Data":"aa5c5f2bec1eb564a40b80301f2d721966c4fd1232a0a3342b14219c3b9c768d"} Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.014117 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" event={"ID":"32ac2d9e-9651-4810-b7bd-fa945cef9b74","Type":"ContainerStarted","Data":"34dfc9835a70b6351f5e18e4f9823ba10917e6529f07a3690692121e842e8ae8"} Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.014185 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" podUID="32ac2d9e-9651-4810-b7bd-fa945cef9b74" containerName="controller-manager" containerID="cri-o://aa5c5f2bec1eb564a40b80301f2d721966c4fd1232a0a3342b14219c3b9c768d" gracePeriod=30 Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.014477 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.020136 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.041273 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-t7tbg" podStartSLOduration=169.041256015 podStartE2EDuration="2m49.041256015s" podCreationTimestamp="2026-01-27 19:10:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-27 19:13:12.027793667 +0000 UTC m=+194.533398814" watchObservedRunningTime="2026-01-27 19:13:12.041256015 +0000 UTC m=+194.546861162" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.042710 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" podStartSLOduration=24.042703729 podStartE2EDuration="24.042703729s" podCreationTimestamp="2026-01-27 19:12:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:13:12.041216864 +0000 UTC m=+194.546822011" watchObservedRunningTime="2026-01-27 19:13:12.042703729 +0000 UTC m=+194.548308876" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.061310 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" podStartSLOduration=4.061294154 podStartE2EDuration="4.061294154s" podCreationTimestamp="2026-01-27 19:13:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:13:12.056820158 +0000 UTC m=+194.562425305" watchObservedRunningTime="2026-01-27 19:13:12.061294154 +0000 UTC m=+194.566899301" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.174030 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.330264 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfecd87d-0e25-4116-ae11-72e607a011dd" path="/var/lib/kubelet/pods/dfecd87d-0e25-4116-ae11-72e607a011dd/volumes" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.384845 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.392191 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-client-ca\") pod \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.392243 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32ac2d9e-9651-4810-b7bd-fa945cef9b74-serving-cert\") pod \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.393548 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-client-ca" (OuterVolumeSpecName: "client-ca") pod "32ac2d9e-9651-4810-b7bd-fa945cef9b74" (UID: "32ac2d9e-9651-4810-b7bd-fa945cef9b74"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.397688 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32ac2d9e-9651-4810-b7bd-fa945cef9b74-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "32ac2d9e-9651-4810-b7bd-fa945cef9b74" (UID: "32ac2d9e-9651-4810-b7bd-fa945cef9b74"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.493301 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-proxy-ca-bundles\") pod \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.493345 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-config\") pod \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.493366 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pz85w\" (UniqueName: \"kubernetes.io/projected/32ac2d9e-9651-4810-b7bd-fa945cef9b74-kube-api-access-pz85w\") pod \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\" (UID: \"32ac2d9e-9651-4810-b7bd-fa945cef9b74\") " Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.493859 4776 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.493915 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/32ac2d9e-9651-4810-b7bd-fa945cef9b74-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.493850 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "32ac2d9e-9651-4810-b7bd-fa945cef9b74" (UID: "32ac2d9e-9651-4810-b7bd-fa945cef9b74"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.493985 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-config" (OuterVolumeSpecName: "config") pod "32ac2d9e-9651-4810-b7bd-fa945cef9b74" (UID: "32ac2d9e-9651-4810-b7bd-fa945cef9b74"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.495780 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32ac2d9e-9651-4810-b7bd-fa945cef9b74-kube-api-access-pz85w" (OuterVolumeSpecName: "kube-api-access-pz85w") pod "32ac2d9e-9651-4810-b7bd-fa945cef9b74" (UID: "32ac2d9e-9651-4810-b7bd-fa945cef9b74"). InnerVolumeSpecName "kube-api-access-pz85w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.594472 4776 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.594501 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/32ac2d9e-9651-4810-b7bd-fa945cef9b74-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:12 crc kubenswrapper[4776]: I0127 19:13:12.594511 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pz85w\" (UniqueName: \"kubernetes.io/projected/32ac2d9e-9651-4810-b7bd-fa945cef9b74-kube-api-access-pz85w\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.019462 4776 generic.go:334] "Generic (PLEG): container finished" podID="32ac2d9e-9651-4810-b7bd-fa945cef9b74" containerID="aa5c5f2bec1eb564a40b80301f2d721966c4fd1232a0a3342b14219c3b9c768d" exitCode=0 Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.019513 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" event={"ID":"32ac2d9e-9651-4810-b7bd-fa945cef9b74","Type":"ContainerDied","Data":"aa5c5f2bec1eb564a40b80301f2d721966c4fd1232a0a3342b14219c3b9c768d"} Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.019537 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.019554 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cc78794d4-tpqvt" event={"ID":"32ac2d9e-9651-4810-b7bd-fa945cef9b74","Type":"ContainerDied","Data":"34dfc9835a70b6351f5e18e4f9823ba10917e6529f07a3690692121e842e8ae8"} Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.019576 4776 scope.go:117] "RemoveContainer" containerID="aa5c5f2bec1eb564a40b80301f2d721966c4fd1232a0a3342b14219c3b9c768d" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.041920 4776 scope.go:117] "RemoveContainer" containerID="aa5c5f2bec1eb564a40b80301f2d721966c4fd1232a0a3342b14219c3b9c768d" Jan 27 19:13:13 crc kubenswrapper[4776]: E0127 19:13:13.044149 4776 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa5c5f2bec1eb564a40b80301f2d721966c4fd1232a0a3342b14219c3b9c768d\": container with ID starting with aa5c5f2bec1eb564a40b80301f2d721966c4fd1232a0a3342b14219c3b9c768d not found: ID does not exist" containerID="aa5c5f2bec1eb564a40b80301f2d721966c4fd1232a0a3342b14219c3b9c768d" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.044191 4776 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa5c5f2bec1eb564a40b80301f2d721966c4fd1232a0a3342b14219c3b9c768d"} err="failed to get container status \"aa5c5f2bec1eb564a40b80301f2d721966c4fd1232a0a3342b14219c3b9c768d\": rpc error: code = NotFound desc = could not find container \"aa5c5f2bec1eb564a40b80301f2d721966c4fd1232a0a3342b14219c3b9c768d\": container with ID starting with aa5c5f2bec1eb564a40b80301f2d721966c4fd1232a0a3342b14219c3b9c768d not found: ID does not exist" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.049452 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-controller-manager/controller-manager-7cc78794d4-tpqvt"] Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.055418 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7cc78794d4-tpqvt"] Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.111852 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6f46c99b54-mmmnk"] Jan 27 19:13:13 crc kubenswrapper[4776]: E0127 19:13:13.112228 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ac2d9e-9651-4810-b7bd-fa945cef9b74" containerName="controller-manager" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.112249 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ac2d9e-9651-4810-b7bd-fa945cef9b74" containerName="controller-manager" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.112430 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="32ac2d9e-9651-4810-b7bd-fa945cef9b74" containerName="controller-manager" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.113040 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.117105 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.117317 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.117509 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.118998 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.119228 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.119905 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.131635 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.135663 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6f46c99b54-mmmnk"] Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.227220 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.305075 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-proxy-ca-bundles\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.305131 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-config\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.305172 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-client-ca\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.305201 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgzpv\" (UniqueName: \"kubernetes.io/projected/d30d8301-8534-4750-a6ee-f628d52cfcfa-kube-api-access-bgzpv\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.305248 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d30d8301-8534-4750-a6ee-f628d52cfcfa-serving-cert\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.406816 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/89742130-11e4-4222-ac4a-e8fabb586600-kube-api-access\") pod \"89742130-11e4-4222-ac4a-e8fabb586600\" (UID: \"89742130-11e4-4222-ac4a-e8fabb586600\") " Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.406849 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/89742130-11e4-4222-ac4a-e8fabb586600-kubelet-dir\") pod \"89742130-11e4-4222-ac4a-e8fabb586600\" (UID: \"89742130-11e4-4222-ac4a-e8fabb586600\") " Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.406929 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgzpv\" (UniqueName: \"kubernetes.io/projected/d30d8301-8534-4750-a6ee-f628d52cfcfa-kube-api-access-bgzpv\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.406976 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/d30d8301-8534-4750-a6ee-f628d52cfcfa-serving-cert\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.407007 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-proxy-ca-bundles\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.407043 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-config\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.407073 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-client-ca\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.407864 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-client-ca\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.408453 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/89742130-11e4-4222-ac4a-e8fabb586600-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "89742130-11e4-4222-ac4a-e8fabb586600" (UID: "89742130-11e4-4222-ac4a-e8fabb586600"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.409676 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-proxy-ca-bundles\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.409894 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-config\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.414989 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89742130-11e4-4222-ac4a-e8fabb586600-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "89742130-11e4-4222-ac4a-e8fabb586600" (UID: "89742130-11e4-4222-ac4a-e8fabb586600"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.415702 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d30d8301-8534-4750-a6ee-f628d52cfcfa-serving-cert\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.424154 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgzpv\" (UniqueName: \"kubernetes.io/projected/d30d8301-8534-4750-a6ee-f628d52cfcfa-kube-api-access-bgzpv\") pod \"controller-manager-6f46c99b54-mmmnk\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.431012 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.507696 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/89742130-11e4-4222-ac4a-e8fabb586600-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.507729 4776 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/89742130-11e4-4222-ac4a-e8fabb586600-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:13 crc kubenswrapper[4776]: I0127 19:13:13.630572 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6f46c99b54-mmmnk"] Jan 27 19:13:13 crc kubenswrapper[4776]: W0127 19:13:13.636600 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd30d8301_8534_4750_a6ee_f628d52cfcfa.slice/crio-631c87ece2acb84198bcacaed03544719ebcf9ee6f9efdebcf2f90db197a67b1 WatchSource:0}: Error finding container 631c87ece2acb84198bcacaed03544719ebcf9ee6f9efdebcf2f90db197a67b1: Status 404 returned error can't find the container with id 631c87ece2acb84198bcacaed03544719ebcf9ee6f9efdebcf2f90db197a67b1 Jan 27 19:13:14 crc kubenswrapper[4776]: I0127 19:13:14.026736 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" event={"ID":"d30d8301-8534-4750-a6ee-f628d52cfcfa","Type":"ContainerStarted","Data":"a94b61494f68531d08bd5e061e10e7164b1e7c5714f85ae22015198c6d6139c8"} Jan 27 19:13:14 crc kubenswrapper[4776]: I0127 19:13:14.026998 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" event={"ID":"d30d8301-8534-4750-a6ee-f628d52cfcfa","Type":"ContainerStarted","Data":"631c87ece2acb84198bcacaed03544719ebcf9ee6f9efdebcf2f90db197a67b1"} Jan 27 19:13:14 crc kubenswrapper[4776]: I0127 19:13:14.027012 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:14 crc kubenswrapper[4776]: I0127 19:13:14.029673 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" 
event={"ID":"89742130-11e4-4222-ac4a-e8fabb586600","Type":"ContainerDied","Data":"410ea11ee5d92a679deab2eb62433a85e1c8bdb529d0ccdc9850332a26763abc"} Jan 27 19:13:14 crc kubenswrapper[4776]: I0127 19:13:14.029708 4776 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="410ea11ee5d92a679deab2eb62433a85e1c8bdb529d0ccdc9850332a26763abc" Jan 27 19:13:14 crc kubenswrapper[4776]: I0127 19:13:14.029857 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 27 19:13:14 crc kubenswrapper[4776]: I0127 19:13:14.031895 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:14 crc kubenswrapper[4776]: I0127 19:13:14.046225 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" podStartSLOduration=6.046208508 podStartE2EDuration="6.046208508s" podCreationTimestamp="2026-01-27 19:13:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:13:14.044870056 +0000 UTC m=+196.550475203" watchObservedRunningTime="2026-01-27 19:13:14.046208508 +0000 UTC m=+196.551813665" Jan 27 19:13:14 crc kubenswrapper[4776]: I0127 19:13:14.326123 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32ac2d9e-9651-4810-b7bd-fa945cef9b74" path="/var/lib/kubelet/pods/32ac2d9e-9651-4810-b7bd-fa945cef9b74/volumes" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.136191 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 27 19:13:15 crc kubenswrapper[4776]: E0127 19:13:15.137326 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89742130-11e4-4222-ac4a-e8fabb586600" containerName="pruner" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.137463 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="89742130-11e4-4222-ac4a-e8fabb586600" containerName="pruner" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.137760 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="89742130-11e4-4222-ac4a-e8fabb586600" containerName="pruner" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.138710 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.143031 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.145314 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.153990 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.238191 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-kubelet-dir\") pod \"installer-9-crc\" (UID: \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.238254 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-kube-api-access\") pod \"installer-9-crc\" (UID: \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.238281 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-var-lock\") pod \"installer-9-crc\" (UID: \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.339385 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-kubelet-dir\") pod \"installer-9-crc\" (UID: \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.339603 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-kubelet-dir\") pod \"installer-9-crc\" (UID: \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.340114 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-kube-api-access\") pod \"installer-9-crc\" (UID: \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.340155 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-var-lock\") pod \"installer-9-crc\" (UID: \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.340318 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-var-lock\") pod \"installer-9-crc\" (UID: 
\"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.360830 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-kube-api-access\") pod \"installer-9-crc\" (UID: \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.473425 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 27 19:13:15 crc kubenswrapper[4776]: I0127 19:13:15.906859 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 27 19:13:15 crc kubenswrapper[4776]: W0127 19:13:15.909337 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod09b09d3e_80ec_4ad1_a3b0_f011077ceefa.slice/crio-dec239c252a4a9d4ceda99dd7a3ff188f71a4658077cba98d6f60d6effb25a17 WatchSource:0}: Error finding container dec239c252a4a9d4ceda99dd7a3ff188f71a4658077cba98d6f60d6effb25a17: Status 404 returned error can't find the container with id dec239c252a4a9d4ceda99dd7a3ff188f71a4658077cba98d6f60d6effb25a17 Jan 27 19:13:16 crc kubenswrapper[4776]: I0127 19:13:16.043031 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"09b09d3e-80ec-4ad1-a3b0-f011077ceefa","Type":"ContainerStarted","Data":"dec239c252a4a9d4ceda99dd7a3ff188f71a4658077cba98d6f60d6effb25a17"} Jan 27 19:13:17 crc kubenswrapper[4776]: I0127 19:13:17.051303 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"09b09d3e-80ec-4ad1-a3b0-f011077ceefa","Type":"ContainerStarted","Data":"cf5c1dc1e8166252bfd9d4df113b4dcafc73ec4db5a88161dbf29d1b86832561"} Jan 27 19:13:17 crc kubenswrapper[4776]: I0127 19:13:17.066678 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.0666611440000002 podStartE2EDuration="2.066661144s" podCreationTimestamp="2026-01-27 19:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:13:17.064959231 +0000 UTC m=+199.570564388" watchObservedRunningTime="2026-01-27 19:13:17.066661144 +0000 UTC m=+199.572266291" Jan 27 19:13:28 crc kubenswrapper[4776]: I0127 19:13:28.118708 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6f46c99b54-mmmnk"] Jan 27 19:13:28 crc kubenswrapper[4776]: I0127 19:13:28.120487 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" podUID="d30d8301-8534-4750-a6ee-f628d52cfcfa" containerName="controller-manager" containerID="cri-o://a94b61494f68531d08bd5e061e10e7164b1e7c5714f85ae22015198c6d6139c8" gracePeriod=30 Jan 27 19:13:28 crc kubenswrapper[4776]: I0127 19:13:28.131576 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk"] Jan 27 19:13:28 crc kubenswrapper[4776]: I0127 19:13:28.131765 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" 
podUID="5f12ec4b-50ad-4a8c-abcb-629947b04b9c" containerName="route-controller-manager" containerID="cri-o://ab992a288d1af84f5047c601e754b664e4bac67efb6d32ae675deb718a400688" gracePeriod=30 Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.115142 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lhtpz" event={"ID":"15735b7d-e2cc-4603-8cc2-d284fcf18ef8","Type":"ContainerStarted","Data":"e79912acc1566aa032a31666ed121488e82d9f19f0ac7a5a14d212a413717b3b"} Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.121998 4776 generic.go:334] "Generic (PLEG): container finished" podID="d30d8301-8534-4750-a6ee-f628d52cfcfa" containerID="a94b61494f68531d08bd5e061e10e7164b1e7c5714f85ae22015198c6d6139c8" exitCode=0 Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.122381 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" event={"ID":"d30d8301-8534-4750-a6ee-f628d52cfcfa","Type":"ContainerDied","Data":"a94b61494f68531d08bd5e061e10e7164b1e7c5714f85ae22015198c6d6139c8"} Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.124002 4776 generic.go:334] "Generic (PLEG): container finished" podID="5f12ec4b-50ad-4a8c-abcb-629947b04b9c" containerID="ab992a288d1af84f5047c601e754b664e4bac67efb6d32ae675deb718a400688" exitCode=0 Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.124541 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" event={"ID":"5f12ec4b-50ad-4a8c-abcb-629947b04b9c","Type":"ContainerDied","Data":"ab992a288d1af84f5047c601e754b664e4bac67efb6d32ae675deb718a400688"} Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.152150 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqbt9" event={"ID":"fde48346-25d2-4251-b8f7-2e333cba2144","Type":"ContainerStarted","Data":"74758b1f431594f3be20c519721f5f00b765774c3c262bbbd4803294cb538917"} Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.154616 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.157127 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88bn9" event={"ID":"5807670f-4076-457a-81d3-218df5584035","Type":"ContainerStarted","Data":"d0b400833ec38fd654a2ca1e3213fba437f6dce8308e7ff0578af5e2be3f9bbe"} Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.164511 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r76hn" event={"ID":"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4","Type":"ContainerStarted","Data":"b3c1675ea16d0004eff7c6a54a470028cba8bbb12382137857bba51a2774285d"} Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.217939 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-client-ca\") pod \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.218021 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-serving-cert\") pod \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.218061 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgc77\" (UniqueName: \"kubernetes.io/projected/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-kube-api-access-zgc77\") pod \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.218091 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-config\") pod \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\" (UID: \"5f12ec4b-50ad-4a8c-abcb-629947b04b9c\") " Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.219529 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-client-ca" (OuterVolumeSpecName: "client-ca") pod "5f12ec4b-50ad-4a8c-abcb-629947b04b9c" (UID: "5f12ec4b-50ad-4a8c-abcb-629947b04b9c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.222642 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-config" (OuterVolumeSpecName: "config") pod "5f12ec4b-50ad-4a8c-abcb-629947b04b9c" (UID: "5f12ec4b-50ad-4a8c-abcb-629947b04b9c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.227438 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-kube-api-access-zgc77" (OuterVolumeSpecName: "kube-api-access-zgc77") pod "5f12ec4b-50ad-4a8c-abcb-629947b04b9c" (UID: "5f12ec4b-50ad-4a8c-abcb-629947b04b9c"). InnerVolumeSpecName "kube-api-access-zgc77". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.228894 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5f12ec4b-50ad-4a8c-abcb-629947b04b9c" (UID: "5f12ec4b-50ad-4a8c-abcb-629947b04b9c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.292500 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.320651 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.320685 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgc77\" (UniqueName: \"kubernetes.io/projected/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-kube-api-access-zgc77\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.320702 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.320714 4776 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5f12ec4b-50ad-4a8c-abcb-629947b04b9c-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.421423 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-proxy-ca-bundles\") pod \"d30d8301-8534-4750-a6ee-f628d52cfcfa\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.421481 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-config\") pod \"d30d8301-8534-4750-a6ee-f628d52cfcfa\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.421521 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgzpv\" (UniqueName: \"kubernetes.io/projected/d30d8301-8534-4750-a6ee-f628d52cfcfa-kube-api-access-bgzpv\") pod \"d30d8301-8534-4750-a6ee-f628d52cfcfa\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.421549 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-client-ca\") pod \"d30d8301-8534-4750-a6ee-f628d52cfcfa\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.421588 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d30d8301-8534-4750-a6ee-f628d52cfcfa-serving-cert\") pod \"d30d8301-8534-4750-a6ee-f628d52cfcfa\" (UID: \"d30d8301-8534-4750-a6ee-f628d52cfcfa\") " Jan 27 19:13:29 crc kubenswrapper[4776]: 
I0127 19:13:29.422209 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d30d8301-8534-4750-a6ee-f628d52cfcfa" (UID: "d30d8301-8534-4750-a6ee-f628d52cfcfa"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.422353 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-config" (OuterVolumeSpecName: "config") pod "d30d8301-8534-4750-a6ee-f628d52cfcfa" (UID: "d30d8301-8534-4750-a6ee-f628d52cfcfa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.422878 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-client-ca" (OuterVolumeSpecName: "client-ca") pod "d30d8301-8534-4750-a6ee-f628d52cfcfa" (UID: "d30d8301-8534-4750-a6ee-f628d52cfcfa"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.425299 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d30d8301-8534-4750-a6ee-f628d52cfcfa-kube-api-access-bgzpv" (OuterVolumeSpecName: "kube-api-access-bgzpv") pod "d30d8301-8534-4750-a6ee-f628d52cfcfa" (UID: "d30d8301-8534-4750-a6ee-f628d52cfcfa"). InnerVolumeSpecName "kube-api-access-bgzpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.425768 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d30d8301-8534-4750-a6ee-f628d52cfcfa-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d30d8301-8534-4750-a6ee-f628d52cfcfa" (UID: "d30d8301-8534-4750-a6ee-f628d52cfcfa"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.522888 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d30d8301-8534-4750-a6ee-f628d52cfcfa-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.522936 4776 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.522955 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.522974 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgzpv\" (UniqueName: \"kubernetes.io/projected/d30d8301-8534-4750-a6ee-f628d52cfcfa-kube-api-access-bgzpv\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:29 crc kubenswrapper[4776]: I0127 19:13:29.522991 4776 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d30d8301-8534-4750-a6ee-f628d52cfcfa-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.011923 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-dghnj"] Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.108690 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks"] Jan 27 19:13:30 crc kubenswrapper[4776]: E0127 19:13:30.109129 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f12ec4b-50ad-4a8c-abcb-629947b04b9c" containerName="route-controller-manager" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.109140 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f12ec4b-50ad-4a8c-abcb-629947b04b9c" containerName="route-controller-manager" Jan 27 19:13:30 crc kubenswrapper[4776]: E0127 19:13:30.109160 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d30d8301-8534-4750-a6ee-f628d52cfcfa" containerName="controller-manager" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.109166 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="d30d8301-8534-4750-a6ee-f628d52cfcfa" containerName="controller-manager" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.109262 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f12ec4b-50ad-4a8c-abcb-629947b04b9c" containerName="route-controller-manager" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.109281 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="d30d8301-8534-4750-a6ee-f628d52cfcfa" containerName="controller-manager" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.109612 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.119432 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks"] Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.175935 4776 generic.go:334] "Generic (PLEG): container finished" podID="5807670f-4076-457a-81d3-218df5584035" containerID="d0b400833ec38fd654a2ca1e3213fba437f6dce8308e7ff0578af5e2be3f9bbe" exitCode=0 Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.176035 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88bn9" event={"ID":"5807670f-4076-457a-81d3-218df5584035","Type":"ContainerDied","Data":"d0b400833ec38fd654a2ca1e3213fba437f6dce8308e7ff0578af5e2be3f9bbe"} Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.179630 4776 generic.go:334] "Generic (PLEG): container finished" podID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" containerID="e79912acc1566aa032a31666ed121488e82d9f19f0ac7a5a14d212a413717b3b" exitCode=0 Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.179669 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lhtpz" event={"ID":"15735b7d-e2cc-4603-8cc2-d284fcf18ef8","Type":"ContainerDied","Data":"e79912acc1566aa032a31666ed121488e82d9f19f0ac7a5a14d212a413717b3b"} Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.181886 4776 generic.go:334] "Generic (PLEG): container finished" podID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" containerID="b3c1675ea16d0004eff7c6a54a470028cba8bbb12382137857bba51a2774285d" exitCode=0 Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.181924 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r76hn" event={"ID":"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4","Type":"ContainerDied","Data":"b3c1675ea16d0004eff7c6a54a470028cba8bbb12382137857bba51a2774285d"} Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.189221 4776 generic.go:334] "Generic (PLEG): container finished" podID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" containerID="d8fe70cc140fa046d49458fe43fbbc16fda77fc8bbec811e32d7b2b0ab7208ca" exitCode=0 Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.189286 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jsvqs" event={"ID":"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b","Type":"ContainerDied","Data":"d8fe70cc140fa046d49458fe43fbbc16fda77fc8bbec811e32d7b2b0ab7208ca"} Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.192063 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.192116 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f46c99b54-mmmnk" event={"ID":"d30d8301-8534-4750-a6ee-f628d52cfcfa","Type":"ContainerDied","Data":"631c87ece2acb84198bcacaed03544719ebcf9ee6f9efdebcf2f90db197a67b1"} Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.192170 4776 scope.go:117] "RemoveContainer" containerID="a94b61494f68531d08bd5e061e10e7164b1e7c5714f85ae22015198c6d6139c8" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.194636 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" event={"ID":"5f12ec4b-50ad-4a8c-abcb-629947b04b9c","Type":"ContainerDied","Data":"8b7ba160a76537ea39dbdd976dcaeba3e950ff0497ccc79873c7ae7fcd86fbed"} Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.194718 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.204910 4776 generic.go:334] "Generic (PLEG): container finished" podID="fde48346-25d2-4251-b8f7-2e333cba2144" containerID="74758b1f431594f3be20c519721f5f00b765774c3c262bbbd4803294cb538917" exitCode=0 Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.204983 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqbt9" event={"ID":"fde48346-25d2-4251-b8f7-2e333cba2144","Type":"ContainerDied","Data":"74758b1f431594f3be20c519721f5f00b765774c3c262bbbd4803294cb538917"} Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.226637 4776 generic.go:334] "Generic (PLEG): container finished" podID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" containerID="18d72ccd386aeccbcd42127b1bc4efb4ec445b712ee54359651428a239b7e3e6" exitCode=0 Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.226679 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cp674" event={"ID":"4a2aa1a0-6ad1-4edd-a71a-987976a2f323","Type":"ContainerDied","Data":"18d72ccd386aeccbcd42127b1bc4efb4ec445b712ee54359651428a239b7e3e6"} Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.229144 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d8c28b9-d6dc-4392-8569-09ad24b4f113-serving-cert\") pod \"route-controller-manager-669ff57b9c-h4pks\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.229204 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d8c28b9-d6dc-4392-8569-09ad24b4f113-config\") pod \"route-controller-manager-669ff57b9c-h4pks\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.229223 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d8c28b9-d6dc-4392-8569-09ad24b4f113-client-ca\") pod \"route-controller-manager-669ff57b9c-h4pks\" 
(UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.229247 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8xnv\" (UniqueName: \"kubernetes.io/projected/6d8c28b9-d6dc-4392-8569-09ad24b4f113-kube-api-access-p8xnv\") pod \"route-controller-manager-669ff57b9c-h4pks\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.230351 4776 scope.go:117] "RemoveContainer" containerID="ab992a288d1af84f5047c601e754b664e4bac67efb6d32ae675deb718a400688" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.230894 4776 generic.go:334] "Generic (PLEG): container finished" podID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" containerID="23a304355defc7aa500aeb79ae4dd440776969fa5ce40af099440254919ec2eb" exitCode=0 Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.230973 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4q55b" event={"ID":"5d346b77-e45d-4fb8-be3e-9b82fce112eb","Type":"ContainerDied","Data":"23a304355defc7aa500aeb79ae4dd440776969fa5ce40af099440254919ec2eb"} Jan 27 19:13:30 crc kubenswrapper[4776]: E0127 19:13:30.303614 4776 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd30d8301_8534_4750_a6ee_f628d52cfcfa.slice\": RecentStats: unable to find data in memory cache]" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.304256 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk"] Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.306945 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5c6d6df84c-sqgzk"] Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.329715 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f12ec4b-50ad-4a8c-abcb-629947b04b9c" path="/var/lib/kubelet/pods/5f12ec4b-50ad-4a8c-abcb-629947b04b9c/volumes" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.329981 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8xnv\" (UniqueName: \"kubernetes.io/projected/6d8c28b9-d6dc-4392-8569-09ad24b4f113-kube-api-access-p8xnv\") pod \"route-controller-manager-669ff57b9c-h4pks\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.330109 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d8c28b9-d6dc-4392-8569-09ad24b4f113-serving-cert\") pod \"route-controller-manager-669ff57b9c-h4pks\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.331049 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6f46c99b54-mmmnk"] Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.331092 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-controller-manager/controller-manager-6f46c99b54-mmmnk"] Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.331376 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d8c28b9-d6dc-4392-8569-09ad24b4f113-config\") pod \"route-controller-manager-669ff57b9c-h4pks\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.332591 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d8c28b9-d6dc-4392-8569-09ad24b4f113-client-ca\") pod \"route-controller-manager-669ff57b9c-h4pks\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.332515 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d8c28b9-d6dc-4392-8569-09ad24b4f113-config\") pod \"route-controller-manager-669ff57b9c-h4pks\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.333864 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d8c28b9-d6dc-4392-8569-09ad24b4f113-client-ca\") pod \"route-controller-manager-669ff57b9c-h4pks\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.335222 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d8c28b9-d6dc-4392-8569-09ad24b4f113-serving-cert\") pod \"route-controller-manager-669ff57b9c-h4pks\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.343916 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8xnv\" (UniqueName: \"kubernetes.io/projected/6d8c28b9-d6dc-4392-8569-09ad24b4f113-kube-api-access-p8xnv\") pod \"route-controller-manager-669ff57b9c-h4pks\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.449844 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:30 crc kubenswrapper[4776]: I0127 19:13:30.616655 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks"] Jan 27 19:13:30 crc kubenswrapper[4776]: W0127 19:13:30.624681 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d8c28b9_d6dc_4392_8569_09ad24b4f113.slice/crio-909f5144a03306f0435e23827eb189c23be12de309ebeb61ff757551d5cd8e5c WatchSource:0}: Error finding container 909f5144a03306f0435e23827eb189c23be12de309ebeb61ff757551d5cd8e5c: Status 404 returned error can't find the container with id 909f5144a03306f0435e23827eb189c23be12de309ebeb61ff757551d5cd8e5c Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.246502 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqbt9" event={"ID":"fde48346-25d2-4251-b8f7-2e333cba2144","Type":"ContainerStarted","Data":"ed516c774eb69c4922d73c21daad61bc3ad499299f07a118c45ec20a671421c8"} Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.248909 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4q55b" event={"ID":"5d346b77-e45d-4fb8-be3e-9b82fce112eb","Type":"ContainerStarted","Data":"35b4e5bbeea3d5dac551288eb161bcad1154405519d85afaf32dae547a15f52b"} Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.251074 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lhtpz" event={"ID":"15735b7d-e2cc-4603-8cc2-d284fcf18ef8","Type":"ContainerStarted","Data":"1534a46d102f2d0e9f20c5adab795c53a058178b9ce0d45b3d556dc5e4a2fff1"} Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.253313 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r76hn" event={"ID":"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4","Type":"ContainerStarted","Data":"e3173c73f7ba5611ae83d168557385796901aed06de9038f3bef8c33924d054c"} Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.255414 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jsvqs" event={"ID":"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b","Type":"ContainerStarted","Data":"1e0825066f8ed9bfd72f6cc0ff2efa8924b350a8181cf351aa51bbb42f583163"} Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.259217 4776 generic.go:334] "Generic (PLEG): container finished" podID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" containerID="d12b15a36dcd0a0ca46ca2636b4919651751c8c3feddc3cbd7a1147b97259005" exitCode=0 Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.259269 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9m7mm" event={"ID":"5eedb21e-27c3-4ca4-8a86-4ad640a95f82","Type":"ContainerDied","Data":"d12b15a36dcd0a0ca46ca2636b4919651751c8c3feddc3cbd7a1147b97259005"} Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.263888 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88bn9" event={"ID":"5807670f-4076-457a-81d3-218df5584035","Type":"ContainerStarted","Data":"6aaf7a2868b904e19ee19f774946bf52671b514d46b61ec02e48556c3f5b0d23"} Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.265831 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cp674" 
event={"ID":"4a2aa1a0-6ad1-4edd-a71a-987976a2f323","Type":"ContainerStarted","Data":"d4fa408399be2b3f7bf2aaed9352911e4296aef2e544ee221208207bff37be98"} Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.267448 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" event={"ID":"6d8c28b9-d6dc-4392-8569-09ad24b4f113","Type":"ContainerStarted","Data":"5f70f6314e0cfdffc32ec094ec660b0455d4da20298f1241d64f144a80fc3fd2"} Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.267475 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" event={"ID":"6d8c28b9-d6dc-4392-8569-09ad24b4f113","Type":"ContainerStarted","Data":"909f5144a03306f0435e23827eb189c23be12de309ebeb61ff757551d5cd8e5c"} Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.268192 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.274951 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fqbt9" podStartSLOduration=2.791397003 podStartE2EDuration="1m4.274927757s" podCreationTimestamp="2026-01-27 19:12:27 +0000 UTC" firstStartedPulling="2026-01-27 19:12:29.354119509 +0000 UTC m=+151.859724656" lastFinishedPulling="2026-01-27 19:13:30.837650253 +0000 UTC m=+213.343255410" observedRunningTime="2026-01-27 19:13:31.272727898 +0000 UTC m=+213.778333035" watchObservedRunningTime="2026-01-27 19:13:31.274927757 +0000 UTC m=+213.780532904" Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.291962 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.292028 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.324509 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jsvqs" podStartSLOduration=4.114630732 podStartE2EDuration="1m4.324489896s" podCreationTimestamp="2026-01-27 19:12:27 +0000 UTC" firstStartedPulling="2026-01-27 19:12:30.442591364 +0000 UTC m=+152.948196511" lastFinishedPulling="2026-01-27 19:13:30.652450528 +0000 UTC m=+213.158055675" observedRunningTime="2026-01-27 19:13:31.321894036 +0000 UTC m=+213.827499183" watchObservedRunningTime="2026-01-27 19:13:31.324489896 +0000 UTC m=+213.830095043" Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.344252 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cp674" podStartSLOduration=3.173799757 podStartE2EDuration="1m2.344236475s" podCreationTimestamp="2026-01-27 19:12:29 +0000 UTC" firstStartedPulling="2026-01-27 19:12:31.502598165 +0000 UTC m=+154.008203312" lastFinishedPulling="2026-01-27 19:13:30.673034883 +0000 UTC m=+213.178640030" observedRunningTime="2026-01-27 19:13:31.341650936 +0000 UTC m=+213.847256103" watchObservedRunningTime="2026-01-27 19:13:31.344236475 +0000 UTC m=+213.849841642" Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.363618 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" podStartSLOduration=3.363598663 podStartE2EDuration="3.363598663s" podCreationTimestamp="2026-01-27 19:13:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:13:31.359659181 +0000 UTC m=+213.865264328" watchObservedRunningTime="2026-01-27 19:13:31.363598663 +0000 UTC m=+213.869203810" Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.377709 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lhtpz" podStartSLOduration=4.167083554 podStartE2EDuration="1m4.377690047s" podCreationTimestamp="2026-01-27 19:12:27 +0000 UTC" firstStartedPulling="2026-01-27 19:12:30.393973017 +0000 UTC m=+152.899578184" lastFinishedPulling="2026-01-27 19:13:30.60457953 +0000 UTC m=+213.110184677" observedRunningTime="2026-01-27 19:13:31.377268684 +0000 UTC m=+213.882873831" watchObservedRunningTime="2026-01-27 19:13:31.377690047 +0000 UTC m=+213.883295194" Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.396992 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.404594 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4q55b" podStartSLOduration=1.987171984 podStartE2EDuration="1m2.404573807s" podCreationTimestamp="2026-01-27 19:12:29 +0000 UTC" firstStartedPulling="2026-01-27 19:12:30.389991076 +0000 UTC m=+152.895596243" lastFinishedPulling="2026-01-27 19:13:30.807392919 +0000 UTC m=+213.312998066" observedRunningTime="2026-01-27 19:13:31.403079061 +0000 UTC m=+213.908684228" watchObservedRunningTime="2026-01-27 19:13:31.404573807 +0000 UTC m=+213.910178974" Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.423448 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r76hn" podStartSLOduration=3.320019667 podStartE2EDuration="1m1.423432678s" podCreationTimestamp="2026-01-27 19:12:30 +0000 UTC" firstStartedPulling="2026-01-27 19:12:32.624812775 +0000 UTC m=+155.130417922" lastFinishedPulling="2026-01-27 19:13:30.728225786 +0000 UTC m=+213.233830933" observedRunningTime="2026-01-27 19:13:31.422136279 +0000 UTC m=+213.927741436" watchObservedRunningTime="2026-01-27 19:13:31.423432678 +0000 UTC m=+213.929037825" Jan 27 19:13:31 crc kubenswrapper[4776]: I0127 19:13:31.446997 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-88bn9" podStartSLOduration=3.4176719269999998 podStartE2EDuration="1m1.446979535s" podCreationTimestamp="2026-01-27 19:12:30 +0000 UTC" firstStartedPulling="2026-01-27 19:12:32.591198274 +0000 UTC m=+155.096803421" lastFinishedPulling="2026-01-27 19:13:30.620505882 +0000 UTC m=+213.126111029" observedRunningTime="2026-01-27 19:13:31.442502208 +0000 UTC m=+213.948107365" watchObservedRunningTime="2026-01-27 19:13:31.446979535 +0000 UTC m=+213.952584682" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.110318 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7cd796b576-mphl4"] Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.111243 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.113019 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.113461 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.114183 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.114953 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.115691 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.115886 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.126629 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.129904 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7cd796b576-mphl4"] Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.259657 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-config\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.259702 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-proxy-ca-bundles\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.259734 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c20a8a77-cfda-4d60-8707-794b092551d3-serving-cert\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.259798 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-client-ca\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.259817 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnqfl\" (UniqueName: 
\"kubernetes.io/projected/c20a8a77-cfda-4d60-8707-794b092551d3-kube-api-access-bnqfl\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.274482 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9m7mm" event={"ID":"5eedb21e-27c3-4ca4-8a86-4ad640a95f82","Type":"ContainerStarted","Data":"4c4179d18b4320602e53d654991d24b1e5e1df201239ce72f2c929af07cec93b"} Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.303213 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9m7mm" podStartSLOduration=4.079137908 podStartE2EDuration="1m5.303198257s" podCreationTimestamp="2026-01-27 19:12:27 +0000 UTC" firstStartedPulling="2026-01-27 19:12:30.462702165 +0000 UTC m=+152.968307312" lastFinishedPulling="2026-01-27 19:13:31.686762514 +0000 UTC m=+214.192367661" observedRunningTime="2026-01-27 19:13:32.301784394 +0000 UTC m=+214.807389541" watchObservedRunningTime="2026-01-27 19:13:32.303198257 +0000 UTC m=+214.808803404" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.326160 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d30d8301-8534-4750-a6ee-f628d52cfcfa" path="/var/lib/kubelet/pods/d30d8301-8534-4750-a6ee-f628d52cfcfa/volumes" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.360657 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c20a8a77-cfda-4d60-8707-794b092551d3-serving-cert\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.361742 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-client-ca\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.361813 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnqfl\" (UniqueName: \"kubernetes.io/projected/c20a8a77-cfda-4d60-8707-794b092551d3-kube-api-access-bnqfl\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.362179 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-config\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.362232 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-proxy-ca-bundles\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" 
Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.362570 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-client-ca\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.363132 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-proxy-ca-bundles\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.363247 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-config\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.366378 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c20a8a77-cfda-4d60-8707-794b092551d3-serving-cert\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.376304 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnqfl\" (UniqueName: \"kubernetes.io/projected/c20a8a77-cfda-4d60-8707-794b092551d3-kube-api-access-bnqfl\") pod \"controller-manager-7cd796b576-mphl4\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.425922 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.537184 4776 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-r76hn" podUID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" containerName="registry-server" probeResult="failure" output=< Jan 27 19:13:32 crc kubenswrapper[4776]: timeout: failed to connect service ":50051" within 1s Jan 27 19:13:32 crc kubenswrapper[4776]: > Jan 27 19:13:32 crc kubenswrapper[4776]: I0127 19:13:32.846706 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7cd796b576-mphl4"] Jan 27 19:13:33 crc kubenswrapper[4776]: I0127 19:13:33.294226 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" event={"ID":"c20a8a77-cfda-4d60-8707-794b092551d3","Type":"ContainerStarted","Data":"a2478aa844183a59da6f87ae27ac1189d9e52564cce0c3ee4ddb1fac5390c64e"} Jan 27 19:13:33 crc kubenswrapper[4776]: I0127 19:13:33.294309 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" event={"ID":"c20a8a77-cfda-4d60-8707-794b092551d3","Type":"ContainerStarted","Data":"685898f1b04d55763e0972e38901053bdcddf8e5cbff37a5203f53082b96aee6"} Jan 27 19:13:33 crc kubenswrapper[4776]: I0127 19:13:33.294464 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:33 crc kubenswrapper[4776]: I0127 19:13:33.302761 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:33 crc kubenswrapper[4776]: I0127 19:13:33.314150 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" podStartSLOduration=5.314129172 podStartE2EDuration="5.314129172s" podCreationTimestamp="2026-01-27 19:13:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:13:33.312641227 +0000 UTC m=+215.818246394" watchObservedRunningTime="2026-01-27 19:13:33.314129172 +0000 UTC m=+215.819734319" Jan 27 19:13:37 crc kubenswrapper[4776]: I0127 19:13:37.627724 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:13:37 crc kubenswrapper[4776]: I0127 19:13:37.628383 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:13:37 crc kubenswrapper[4776]: I0127 19:13:37.672082 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:13:37 crc kubenswrapper[4776]: I0127 19:13:37.950429 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:13:37 crc kubenswrapper[4776]: I0127 19:13:37.950485 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.001832 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:13:38 crc 
kubenswrapper[4776]: I0127 19:13:38.034362 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.034508 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.088815 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.168456 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.168515 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.223546 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.298735 4776 patch_prober.go:28] interesting pod/machine-config-daemon-z2w9t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.298864 4776 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" podUID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.298952 4776 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.299875 4776 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde"} pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.299982 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" podUID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerName="machine-config-daemon" containerID="cri-o://4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde" gracePeriod=600 Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.359965 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.370131 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.375176 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.376640 4776 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:13:38 crc kubenswrapper[4776]: I0127 19:13:38.955581 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lhtpz"] Jan 27 19:13:39 crc kubenswrapper[4776]: I0127 19:13:39.333018 4776 generic.go:334] "Generic (PLEG): container finished" podID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerID="4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde" exitCode=0 Jan 27 19:13:39 crc kubenswrapper[4776]: I0127 19:13:39.333237 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" event={"ID":"7ebf810d-762c-445f-9705-a74d274fcf1b","Type":"ContainerDied","Data":"4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde"} Jan 27 19:13:39 crc kubenswrapper[4776]: I0127 19:13:39.591613 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:13:39 crc kubenswrapper[4776]: I0127 19:13:39.593263 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:13:39 crc kubenswrapper[4776]: I0127 19:13:39.627547 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:13:39 crc kubenswrapper[4776]: I0127 19:13:39.928453 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:13:39 crc kubenswrapper[4776]: I0127 19:13:39.930028 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:13:39 crc kubenswrapper[4776]: I0127 19:13:39.983231 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:13:40 crc kubenswrapper[4776]: I0127 19:13:40.338134 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lhtpz" podUID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" containerName="registry-server" containerID="cri-o://1534a46d102f2d0e9f20c5adab795c53a058178b9ce0d45b3d556dc5e4a2fff1" gracePeriod=2 Jan 27 19:13:40 crc kubenswrapper[4776]: I0127 19:13:40.390584 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jsvqs"] Jan 27 19:13:40 crc kubenswrapper[4776]: I0127 19:13:40.390839 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jsvqs" podUID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" containerName="registry-server" containerID="cri-o://1e0825066f8ed9bfd72f6cc0ff2efa8924b350a8181cf351aa51bbb42f583163" gracePeriod=2 Jan 27 19:13:40 crc kubenswrapper[4776]: I0127 19:13:40.411845 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:13:40 crc kubenswrapper[4776]: I0127 19:13:40.411913 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:13:40 crc kubenswrapper[4776]: I0127 19:13:40.926473 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:13:40 crc kubenswrapper[4776]: I0127 
19:13:40.926654 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:13:40 crc kubenswrapper[4776]: I0127 19:13:40.981890 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:13:41 crc kubenswrapper[4776]: I0127 19:13:41.333514 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:13:41 crc kubenswrapper[4776]: I0127 19:13:41.349905 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" event={"ID":"7ebf810d-762c-445f-9705-a74d274fcf1b","Type":"ContainerStarted","Data":"bc6ea7145472e9225fad2ed471d64235159ab5f972af88459dac501e9bf5feab"} Jan 27 19:13:41 crc kubenswrapper[4776]: I0127 19:13:41.354281 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lhtpz" event={"ID":"15735b7d-e2cc-4603-8cc2-d284fcf18ef8","Type":"ContainerDied","Data":"1534a46d102f2d0e9f20c5adab795c53a058178b9ce0d45b3d556dc5e4a2fff1"} Jan 27 19:13:41 crc kubenswrapper[4776]: I0127 19:13:41.354173 4776 generic.go:334] "Generic (PLEG): container finished" podID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" containerID="1534a46d102f2d0e9f20c5adab795c53a058178b9ce0d45b3d556dc5e4a2fff1" exitCode=0 Jan 27 19:13:41 crc kubenswrapper[4776]: I0127 19:13:41.358018 4776 generic.go:334] "Generic (PLEG): container finished" podID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" containerID="1e0825066f8ed9bfd72f6cc0ff2efa8924b350a8181cf351aa51bbb42f583163" exitCode=0 Jan 27 19:13:41 crc kubenswrapper[4776]: I0127 19:13:41.358621 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jsvqs" event={"ID":"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b","Type":"ContainerDied","Data":"1e0825066f8ed9bfd72f6cc0ff2efa8924b350a8181cf351aa51bbb42f583163"} Jan 27 19:13:41 crc kubenswrapper[4776]: I0127 19:13:41.390881 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:13:41 crc kubenswrapper[4776]: I0127 19:13:41.424569 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.360615 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.369988 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.373563 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jsvqs" event={"ID":"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b","Type":"ContainerDied","Data":"8d16a346a2f2ddb82bc5cec9163037441b0f9b57a5ec6fd2fdc0c935a9cf8200"} Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.373611 4776 scope.go:117] "RemoveContainer" containerID="1e0825066f8ed9bfd72f6cc0ff2efa8924b350a8181cf351aa51bbb42f583163" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.373643 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jsvqs" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.375919 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lhtpz" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.375930 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lhtpz" event={"ID":"15735b7d-e2cc-4603-8cc2-d284fcf18ef8","Type":"ContainerDied","Data":"b1b052108485a38ca5ae6a6e9e72fe69b12e352e1dc8b04c6320a00409c4ff24"} Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.407883 4776 scope.go:117] "RemoveContainer" containerID="d8fe70cc140fa046d49458fe43fbbc16fda77fc8bbec811e32d7b2b0ab7208ca" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.414444 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wv9lz\" (UniqueName: \"kubernetes.io/projected/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-kube-api-access-wv9lz\") pod \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\" (UID: \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\") " Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.414492 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-utilities\") pod \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\" (UID: \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\") " Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.414521 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-catalog-content\") pod \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\" (UID: \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\") " Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.414536 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-catalog-content\") pod \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\" (UID: \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\") " Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.414579 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fkr8h\" (UniqueName: \"kubernetes.io/projected/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-kube-api-access-fkr8h\") pod \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\" (UID: \"a87f66a9-fc0e-43d0-a4da-7e205ba8f81b\") " Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.416821 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-utilities" (OuterVolumeSpecName: "utilities") pod "a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" (UID: "a87f66a9-fc0e-43d0-a4da-7e205ba8f81b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.425067 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-kube-api-access-wv9lz" (OuterVolumeSpecName: "kube-api-access-wv9lz") pod "15735b7d-e2cc-4603-8cc2-d284fcf18ef8" (UID: "15735b7d-e2cc-4603-8cc2-d284fcf18ef8"). InnerVolumeSpecName "kube-api-access-wv9lz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.425289 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-kube-api-access-fkr8h" (OuterVolumeSpecName: "kube-api-access-fkr8h") pod "a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" (UID: "a87f66a9-fc0e-43d0-a4da-7e205ba8f81b"). InnerVolumeSpecName "kube-api-access-fkr8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.432959 4776 scope.go:117] "RemoveContainer" containerID="ab80765e55519eaf15063b3c725132c39b270aa0bc4d24fc6ab119c669f941e7" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.477517 4776 scope.go:117] "RemoveContainer" containerID="1534a46d102f2d0e9f20c5adab795c53a058178b9ce0d45b3d556dc5e4a2fff1" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.488885 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" (UID: "a87f66a9-fc0e-43d0-a4da-7e205ba8f81b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.496915 4776 scope.go:117] "RemoveContainer" containerID="e79912acc1566aa032a31666ed121488e82d9f19f0ac7a5a14d212a413717b3b" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.518244 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-utilities\") pod \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\" (UID: \"15735b7d-e2cc-4603-8cc2-d284fcf18ef8\") " Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.518836 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wv9lz\" (UniqueName: \"kubernetes.io/projected/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-kube-api-access-wv9lz\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.518952 4776 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.519058 4776 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.519150 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fkr8h\" (UniqueName: \"kubernetes.io/projected/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b-kube-api-access-fkr8h\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.518949 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-utilities" (OuterVolumeSpecName: "utilities") pod "15735b7d-e2cc-4603-8cc2-d284fcf18ef8" (UID: "15735b7d-e2cc-4603-8cc2-d284fcf18ef8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.523469 4776 scope.go:117] "RemoveContainer" containerID="4538b12d105c1b3c50a3c3d2f5b261060c8fde384cc7624ceb6cf5bfe2d042b8" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.619973 4776 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.697412 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jsvqs"] Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.700192 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jsvqs"] Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.753465 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cp674"] Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.808574 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "15735b7d-e2cc-4603-8cc2-d284fcf18ef8" (UID: "15735b7d-e2cc-4603-8cc2-d284fcf18ef8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:13:42 crc kubenswrapper[4776]: I0127 19:13:42.821751 4776 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15735b7d-e2cc-4603-8cc2-d284fcf18ef8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:43 crc kubenswrapper[4776]: I0127 19:13:43.000847 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lhtpz"] Jan 27 19:13:43 crc kubenswrapper[4776]: I0127 19:13:43.007853 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lhtpz"] Jan 27 19:13:43 crc kubenswrapper[4776]: I0127 19:13:43.384885 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cp674" podUID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" containerName="registry-server" containerID="cri-o://d4fa408399be2b3f7bf2aaed9352911e4296aef2e544ee221208207bff37be98" gracePeriod=2 Jan 27 19:13:43 crc kubenswrapper[4776]: I0127 19:13:43.850909 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:13:43 crc kubenswrapper[4776]: I0127 19:13:43.938605 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-catalog-content\") pod \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\" (UID: \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\") " Jan 27 19:13:43 crc kubenswrapper[4776]: I0127 19:13:43.938739 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-utilities\") pod \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\" (UID: \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\") " Jan 27 19:13:43 crc kubenswrapper[4776]: I0127 19:13:43.938869 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xd6w\" (UniqueName: \"kubernetes.io/projected/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-kube-api-access-9xd6w\") pod \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\" (UID: \"4a2aa1a0-6ad1-4edd-a71a-987976a2f323\") " Jan 27 19:13:43 crc kubenswrapper[4776]: I0127 19:13:43.940382 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-utilities" (OuterVolumeSpecName: "utilities") pod "4a2aa1a0-6ad1-4edd-a71a-987976a2f323" (UID: "4a2aa1a0-6ad1-4edd-a71a-987976a2f323"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:13:43 crc kubenswrapper[4776]: I0127 19:13:43.943986 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-kube-api-access-9xd6w" (OuterVolumeSpecName: "kube-api-access-9xd6w") pod "4a2aa1a0-6ad1-4edd-a71a-987976a2f323" (UID: "4a2aa1a0-6ad1-4edd-a71a-987976a2f323"). InnerVolumeSpecName "kube-api-access-9xd6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:43 crc kubenswrapper[4776]: I0127 19:13:43.967746 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4a2aa1a0-6ad1-4edd-a71a-987976a2f323" (UID: "4a2aa1a0-6ad1-4edd-a71a-987976a2f323"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.040970 4776 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.041011 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xd6w\" (UniqueName: \"kubernetes.io/projected/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-kube-api-access-9xd6w\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.041027 4776 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a2aa1a0-6ad1-4edd-a71a-987976a2f323-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.325873 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" path="/var/lib/kubelet/pods/15735b7d-e2cc-4603-8cc2-d284fcf18ef8/volumes" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.326491 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" path="/var/lib/kubelet/pods/a87f66a9-fc0e-43d0-a4da-7e205ba8f81b/volumes" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.393321 4776 generic.go:334] "Generic (PLEG): container finished" podID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" containerID="d4fa408399be2b3f7bf2aaed9352911e4296aef2e544ee221208207bff37be98" exitCode=0 Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.393366 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cp674" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.393384 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cp674" event={"ID":"4a2aa1a0-6ad1-4edd-a71a-987976a2f323","Type":"ContainerDied","Data":"d4fa408399be2b3f7bf2aaed9352911e4296aef2e544ee221208207bff37be98"} Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.394563 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cp674" event={"ID":"4a2aa1a0-6ad1-4edd-a71a-987976a2f323","Type":"ContainerDied","Data":"7961eba373b54d372f0b7a1e4ae3f4b99a9cc81864aef54aabfd1ab449456ca8"} Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.394634 4776 scope.go:117] "RemoveContainer" containerID="d4fa408399be2b3f7bf2aaed9352911e4296aef2e544ee221208207bff37be98" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.420862 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cp674"] Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.423897 4776 scope.go:117] "RemoveContainer" containerID="18d72ccd386aeccbcd42127b1bc4efb4ec445b712ee54359651428a239b7e3e6" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.424945 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cp674"] Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.442216 4776 scope.go:117] "RemoveContainer" containerID="1eb50ce2f5eb3f6d484a7b81824679f6911746adda90dd02c24c5ff5250a429e" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.477351 4776 scope.go:117] "RemoveContainer" containerID="d4fa408399be2b3f7bf2aaed9352911e4296aef2e544ee221208207bff37be98" Jan 27 19:13:44 crc 
kubenswrapper[4776]: E0127 19:13:44.477897 4776 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4fa408399be2b3f7bf2aaed9352911e4296aef2e544ee221208207bff37be98\": container with ID starting with d4fa408399be2b3f7bf2aaed9352911e4296aef2e544ee221208207bff37be98 not found: ID does not exist" containerID="d4fa408399be2b3f7bf2aaed9352911e4296aef2e544ee221208207bff37be98" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.477955 4776 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4fa408399be2b3f7bf2aaed9352911e4296aef2e544ee221208207bff37be98"} err="failed to get container status \"d4fa408399be2b3f7bf2aaed9352911e4296aef2e544ee221208207bff37be98\": rpc error: code = NotFound desc = could not find container \"d4fa408399be2b3f7bf2aaed9352911e4296aef2e544ee221208207bff37be98\": container with ID starting with d4fa408399be2b3f7bf2aaed9352911e4296aef2e544ee221208207bff37be98 not found: ID does not exist" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.477991 4776 scope.go:117] "RemoveContainer" containerID="18d72ccd386aeccbcd42127b1bc4efb4ec445b712ee54359651428a239b7e3e6" Jan 27 19:13:44 crc kubenswrapper[4776]: E0127 19:13:44.478447 4776 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18d72ccd386aeccbcd42127b1bc4efb4ec445b712ee54359651428a239b7e3e6\": container with ID starting with 18d72ccd386aeccbcd42127b1bc4efb4ec445b712ee54359651428a239b7e3e6 not found: ID does not exist" containerID="18d72ccd386aeccbcd42127b1bc4efb4ec445b712ee54359651428a239b7e3e6" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.478518 4776 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18d72ccd386aeccbcd42127b1bc4efb4ec445b712ee54359651428a239b7e3e6"} err="failed to get container status \"18d72ccd386aeccbcd42127b1bc4efb4ec445b712ee54359651428a239b7e3e6\": rpc error: code = NotFound desc = could not find container \"18d72ccd386aeccbcd42127b1bc4efb4ec445b712ee54359651428a239b7e3e6\": container with ID starting with 18d72ccd386aeccbcd42127b1bc4efb4ec445b712ee54359651428a239b7e3e6 not found: ID does not exist" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.478577 4776 scope.go:117] "RemoveContainer" containerID="1eb50ce2f5eb3f6d484a7b81824679f6911746adda90dd02c24c5ff5250a429e" Jan 27 19:13:44 crc kubenswrapper[4776]: E0127 19:13:44.479082 4776 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1eb50ce2f5eb3f6d484a7b81824679f6911746adda90dd02c24c5ff5250a429e\": container with ID starting with 1eb50ce2f5eb3f6d484a7b81824679f6911746adda90dd02c24c5ff5250a429e not found: ID does not exist" containerID="1eb50ce2f5eb3f6d484a7b81824679f6911746adda90dd02c24c5ff5250a429e" Jan 27 19:13:44 crc kubenswrapper[4776]: I0127 19:13:44.479119 4776 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1eb50ce2f5eb3f6d484a7b81824679f6911746adda90dd02c24c5ff5250a429e"} err="failed to get container status \"1eb50ce2f5eb3f6d484a7b81824679f6911746adda90dd02c24c5ff5250a429e\": rpc error: code = NotFound desc = could not find container \"1eb50ce2f5eb3f6d484a7b81824679f6911746adda90dd02c24c5ff5250a429e\": container with ID starting with 1eb50ce2f5eb3f6d484a7b81824679f6911746adda90dd02c24c5ff5250a429e not found: ID does not exist" Jan 27 19:13:45 crc kubenswrapper[4776]: 
I0127 19:13:45.157005 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r76hn"] Jan 27 19:13:45 crc kubenswrapper[4776]: I0127 19:13:45.157436 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-r76hn" podUID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" containerName="registry-server" containerID="cri-o://e3173c73f7ba5611ae83d168557385796901aed06de9038f3bef8c33924d054c" gracePeriod=2 Jan 27 19:13:45 crc kubenswrapper[4776]: I0127 19:13:45.408372 4776 generic.go:334] "Generic (PLEG): container finished" podID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" containerID="e3173c73f7ba5611ae83d168557385796901aed06de9038f3bef8c33924d054c" exitCode=0 Jan 27 19:13:45 crc kubenswrapper[4776]: I0127 19:13:45.408444 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r76hn" event={"ID":"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4","Type":"ContainerDied","Data":"e3173c73f7ba5611ae83d168557385796901aed06de9038f3bef8c33924d054c"} Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.146588 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.281545 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kttvr\" (UniqueName: \"kubernetes.io/projected/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-kube-api-access-kttvr\") pod \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\" (UID: \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\") " Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.281626 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-catalog-content\") pod \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\" (UID: \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\") " Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.281647 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-utilities\") pod \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\" (UID: \"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4\") " Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.283636 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-utilities" (OuterVolumeSpecName: "utilities") pod "38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" (UID: "38b6fb13-3a52-4ed0-be81-b9a0900fb2c4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.288940 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-kube-api-access-kttvr" (OuterVolumeSpecName: "kube-api-access-kttvr") pod "38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" (UID: "38b6fb13-3a52-4ed0-be81-b9a0900fb2c4"). InnerVolumeSpecName "kube-api-access-kttvr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.331586 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" path="/var/lib/kubelet/pods/4a2aa1a0-6ad1-4edd-a71a-987976a2f323/volumes" Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.389161 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kttvr\" (UniqueName: \"kubernetes.io/projected/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-kube-api-access-kttvr\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.389208 4776 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.416504 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r76hn" event={"ID":"38b6fb13-3a52-4ed0-be81-b9a0900fb2c4","Type":"ContainerDied","Data":"3a7db66d509ca9c6e7a750be6a523ee5d0d2ca7beadb5da8d4ce44f9d9cd4e78"} Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.416568 4776 scope.go:117] "RemoveContainer" containerID="e3173c73f7ba5611ae83d168557385796901aed06de9038f3bef8c33924d054c" Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.416574 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r76hn" Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.432794 4776 scope.go:117] "RemoveContainer" containerID="b3c1675ea16d0004eff7c6a54a470028cba8bbb12382137857bba51a2774285d" Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.451655 4776 scope.go:117] "RemoveContainer" containerID="bbd00764e3cdab6c831f7071fdbe990c98c5c5529c52f400ad6eca8be2b47910" Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.482698 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" (UID: "38b6fb13-3a52-4ed0-be81-b9a0900fb2c4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.490400 4776 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.762680 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r76hn"] Jan 27 19:13:46 crc kubenswrapper[4776]: I0127 19:13:46.767326 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-r76hn"] Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.142186 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7cd796b576-mphl4"] Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.142811 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" podUID="c20a8a77-cfda-4d60-8707-794b092551d3" containerName="controller-manager" containerID="cri-o://a2478aa844183a59da6f87ae27ac1189d9e52564cce0c3ee4ddb1fac5390c64e" gracePeriod=30 Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.238095 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks"] Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.238331 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" podUID="6d8c28b9-d6dc-4392-8569-09ad24b4f113" containerName="route-controller-manager" containerID="cri-o://5f70f6314e0cfdffc32ec094ec660b0455d4da20298f1241d64f144a80fc3fd2" gracePeriod=30 Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.326031 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" path="/var/lib/kubelet/pods/38b6fb13-3a52-4ed0-be81-b9a0900fb2c4/volumes" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.433312 4776 generic.go:334] "Generic (PLEG): container finished" podID="6d8c28b9-d6dc-4392-8569-09ad24b4f113" containerID="5f70f6314e0cfdffc32ec094ec660b0455d4da20298f1241d64f144a80fc3fd2" exitCode=0 Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.433375 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" event={"ID":"6d8c28b9-d6dc-4392-8569-09ad24b4f113","Type":"ContainerDied","Data":"5f70f6314e0cfdffc32ec094ec660b0455d4da20298f1241d64f144a80fc3fd2"} Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.434501 4776 generic.go:334] "Generic (PLEG): container finished" podID="c20a8a77-cfda-4d60-8707-794b092551d3" containerID="a2478aa844183a59da6f87ae27ac1189d9e52564cce0c3ee4ddb1fac5390c64e" exitCode=0 Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.434528 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" event={"ID":"c20a8a77-cfda-4d60-8707-794b092551d3","Type":"ContainerDied","Data":"a2478aa844183a59da6f87ae27ac1189d9e52564cce0c3ee4ddb1fac5390c64e"} Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.639327 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.643587 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.822126 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-config\") pod \"c20a8a77-cfda-4d60-8707-794b092551d3\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.822549 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c20a8a77-cfda-4d60-8707-794b092551d3-serving-cert\") pod \"c20a8a77-cfda-4d60-8707-794b092551d3\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.822722 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bnqfl\" (UniqueName: \"kubernetes.io/projected/c20a8a77-cfda-4d60-8707-794b092551d3-kube-api-access-bnqfl\") pod \"c20a8a77-cfda-4d60-8707-794b092551d3\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.823580 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d8c28b9-d6dc-4392-8569-09ad24b4f113-client-ca\") pod \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.823747 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-client-ca\") pod \"c20a8a77-cfda-4d60-8707-794b092551d3\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.823907 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-proxy-ca-bundles\") pod \"c20a8a77-cfda-4d60-8707-794b092551d3\" (UID: \"c20a8a77-cfda-4d60-8707-794b092551d3\") " Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.824125 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d8c28b9-d6dc-4392-8569-09ad24b4f113-config\") pod \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.824234 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d8c28b9-d6dc-4392-8569-09ad24b4f113-serving-cert\") pod \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.824343 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8xnv\" (UniqueName: \"kubernetes.io/projected/6d8c28b9-d6dc-4392-8569-09ad24b4f113-kube-api-access-p8xnv\") pod \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\" (UID: \"6d8c28b9-d6dc-4392-8569-09ad24b4f113\") " Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.824527 4776 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c20a8a77-cfda-4d60-8707-794b092551d3" (UID: "c20a8a77-cfda-4d60-8707-794b092551d3"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.824553 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-client-ca" (OuterVolumeSpecName: "client-ca") pod "c20a8a77-cfda-4d60-8707-794b092551d3" (UID: "c20a8a77-cfda-4d60-8707-794b092551d3"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.824743 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d8c28b9-d6dc-4392-8569-09ad24b4f113-client-ca" (OuterVolumeSpecName: "client-ca") pod "6d8c28b9-d6dc-4392-8569-09ad24b4f113" (UID: "6d8c28b9-d6dc-4392-8569-09ad24b4f113"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.824842 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-config" (OuterVolumeSpecName: "config") pod "c20a8a77-cfda-4d60-8707-794b092551d3" (UID: "c20a8a77-cfda-4d60-8707-794b092551d3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.824937 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d8c28b9-d6dc-4392-8569-09ad24b4f113-config" (OuterVolumeSpecName: "config") pod "6d8c28b9-d6dc-4392-8569-09ad24b4f113" (UID: "6d8c28b9-d6dc-4392-8569-09ad24b4f113"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.825115 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.825217 4776 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d8c28b9-d6dc-4392-8569-09ad24b4f113-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.825337 4776 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-client-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.825417 4776 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c20a8a77-cfda-4d60-8707-794b092551d3-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.825493 4776 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d8c28b9-d6dc-4392-8569-09ad24b4f113-config\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.827591 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c20a8a77-cfda-4d60-8707-794b092551d3-kube-api-access-bnqfl" (OuterVolumeSpecName: "kube-api-access-bnqfl") pod "c20a8a77-cfda-4d60-8707-794b092551d3" (UID: "c20a8a77-cfda-4d60-8707-794b092551d3"). InnerVolumeSpecName "kube-api-access-bnqfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.827630 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c20a8a77-cfda-4d60-8707-794b092551d3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c20a8a77-cfda-4d60-8707-794b092551d3" (UID: "c20a8a77-cfda-4d60-8707-794b092551d3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.829314 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d8c28b9-d6dc-4392-8569-09ad24b4f113-kube-api-access-p8xnv" (OuterVolumeSpecName: "kube-api-access-p8xnv") pod "6d8c28b9-d6dc-4392-8569-09ad24b4f113" (UID: "6d8c28b9-d6dc-4392-8569-09ad24b4f113"). InnerVolumeSpecName "kube-api-access-p8xnv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.841353 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d8c28b9-d6dc-4392-8569-09ad24b4f113-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6d8c28b9-d6dc-4392-8569-09ad24b4f113" (UID: "6d8c28b9-d6dc-4392-8569-09ad24b4f113"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.926143 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d8c28b9-d6dc-4392-8569-09ad24b4f113-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.926175 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8xnv\" (UniqueName: \"kubernetes.io/projected/6d8c28b9-d6dc-4392-8569-09ad24b4f113-kube-api-access-p8xnv\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.926187 4776 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c20a8a77-cfda-4d60-8707-794b092551d3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:48 crc kubenswrapper[4776]: I0127 19:13:48.926195 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bnqfl\" (UniqueName: \"kubernetes.io/projected/c20a8a77-cfda-4d60-8707-794b092551d3-kube-api-access-bnqfl\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:49 crc kubenswrapper[4776]: I0127 19:13:49.441291 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" Jan 27 19:13:49 crc kubenswrapper[4776]: I0127 19:13:49.452047 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks" event={"ID":"6d8c28b9-d6dc-4392-8569-09ad24b4f113","Type":"ContainerDied","Data":"909f5144a03306f0435e23827eb189c23be12de309ebeb61ff757551d5cd8e5c"} Jan 27 19:13:49 crc kubenswrapper[4776]: I0127 19:13:49.452118 4776 scope.go:117] "RemoveContainer" containerID="5f70f6314e0cfdffc32ec094ec660b0455d4da20298f1241d64f144a80fc3fd2" Jan 27 19:13:49 crc kubenswrapper[4776]: I0127 19:13:49.453883 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" event={"ID":"c20a8a77-cfda-4d60-8707-794b092551d3","Type":"ContainerDied","Data":"685898f1b04d55763e0972e38901053bdcddf8e5cbff37a5203f53082b96aee6"} Jan 27 19:13:49 crc kubenswrapper[4776]: I0127 19:13:49.453972 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7cd796b576-mphl4" Jan 27 19:13:49 crc kubenswrapper[4776]: I0127 19:13:49.474272 4776 scope.go:117] "RemoveContainer" containerID="a2478aa844183a59da6f87ae27ac1189d9e52564cce0c3ee4ddb1fac5390c64e" Jan 27 19:13:49 crc kubenswrapper[4776]: I0127 19:13:49.483255 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks"] Jan 27 19:13:49 crc kubenswrapper[4776]: I0127 19:13:49.486873 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-669ff57b9c-h4pks"] Jan 27 19:13:49 crc kubenswrapper[4776]: I0127 19:13:49.501642 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7cd796b576-mphl4"] Jan 27 19:13:49 crc kubenswrapper[4776]: I0127 19:13:49.506133 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7cd796b576-mphl4"] Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.119905 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-575bd794d-qgs4p"] Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120532 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" containerName="registry-server" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120549 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" containerName="registry-server" Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120561 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d8c28b9-d6dc-4392-8569-09ad24b4f113" containerName="route-controller-manager" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120572 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d8c28b9-d6dc-4392-8569-09ad24b4f113" containerName="route-controller-manager" Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120581 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" containerName="extract-utilities" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120590 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" containerName="extract-utilities" Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120598 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" containerName="extract-utilities" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120605 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" containerName="extract-utilities" Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120616 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" containerName="extract-content" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120623 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" containerName="extract-content" Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120637 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" containerName="registry-server" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120644 4776 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" containerName="registry-server" Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120662 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c20a8a77-cfda-4d60-8707-794b092551d3" containerName="controller-manager" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120670 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="c20a8a77-cfda-4d60-8707-794b092551d3" containerName="controller-manager" Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120681 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" containerName="extract-content" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120688 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" containerName="extract-content" Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120698 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" containerName="extract-content" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120705 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" containerName="extract-content" Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120717 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" containerName="extract-utilities" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120724 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" containerName="extract-utilities" Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120734 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" containerName="registry-server" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120741 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" containerName="registry-server" Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120750 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" containerName="extract-content" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120757 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" containerName="extract-content" Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120793 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" containerName="extract-utilities" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120801 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" containerName="extract-utilities" Jan 27 19:13:50 crc kubenswrapper[4776]: E0127 19:13:50.120814 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" containerName="registry-server" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120821 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" containerName="registry-server" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120926 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d8c28b9-d6dc-4392-8569-09ad24b4f113" containerName="route-controller-manager" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 
19:13:50.120942 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="c20a8a77-cfda-4d60-8707-794b092551d3" containerName="controller-manager" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120959 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="15735b7d-e2cc-4603-8cc2-d284fcf18ef8" containerName="registry-server" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120968 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="38b6fb13-3a52-4ed0-be81-b9a0900fb2c4" containerName="registry-server" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120975 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a2aa1a0-6ad1-4edd-a71a-987976a2f323" containerName="registry-server" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.120986 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="a87f66a9-fc0e-43d0-a4da-7e205ba8f81b" containerName="registry-server" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.121415 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.123538 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.124114 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.124120 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.124189 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.124554 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.124943 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.127920 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd"] Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.128642 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.131451 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.131485 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.131554 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.131495 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.131932 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.132149 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.135392 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.141152 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-575bd794d-qgs4p"] Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.159528 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd"] Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.241916 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-proxy-ca-bundles\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.242194 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-serving-cert\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.242222 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2lm5\" (UniqueName: \"kubernetes.io/projected/3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe-kube-api-access-r2lm5\") pod \"route-controller-manager-7f64b9bb4b-wkqdd\" (UID: \"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe\") " pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.242261 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe-client-ca\") pod \"route-controller-manager-7f64b9bb4b-wkqdd\" (UID: \"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe\") " 
pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.242277 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe-serving-cert\") pod \"route-controller-manager-7f64b9bb4b-wkqdd\" (UID: \"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe\") " pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.242298 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9j6fm\" (UniqueName: \"kubernetes.io/projected/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-kube-api-access-9j6fm\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.242350 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe-config\") pod \"route-controller-manager-7f64b9bb4b-wkqdd\" (UID: \"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe\") " pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.242374 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-client-ca\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.242391 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-config\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.326799 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d8c28b9-d6dc-4392-8569-09ad24b4f113" path="/var/lib/kubelet/pods/6d8c28b9-d6dc-4392-8569-09ad24b4f113/volumes" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.327442 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c20a8a77-cfda-4d60-8707-794b092551d3" path="/var/lib/kubelet/pods/c20a8a77-cfda-4d60-8707-794b092551d3/volumes" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.343240 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-proxy-ca-bundles\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.343296 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-serving-cert\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " 
pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.343340 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2lm5\" (UniqueName: \"kubernetes.io/projected/3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe-kube-api-access-r2lm5\") pod \"route-controller-manager-7f64b9bb4b-wkqdd\" (UID: \"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe\") " pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.343390 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe-client-ca\") pod \"route-controller-manager-7f64b9bb4b-wkqdd\" (UID: \"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe\") " pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.343419 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe-serving-cert\") pod \"route-controller-manager-7f64b9bb4b-wkqdd\" (UID: \"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe\") " pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.343449 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9j6fm\" (UniqueName: \"kubernetes.io/projected/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-kube-api-access-9j6fm\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.343479 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe-config\") pod \"route-controller-manager-7f64b9bb4b-wkqdd\" (UID: \"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe\") " pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.343506 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-client-ca\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.343527 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-config\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.344284 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe-client-ca\") pod \"route-controller-manager-7f64b9bb4b-wkqdd\" (UID: \"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe\") " pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.344672 4776 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-proxy-ca-bundles\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.345123 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe-config\") pod \"route-controller-manager-7f64b9bb4b-wkqdd\" (UID: \"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe\") " pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.345454 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-client-ca\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.345828 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-config\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.361352 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe-serving-cert\") pod \"route-controller-manager-7f64b9bb4b-wkqdd\" (UID: \"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe\") " pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.361797 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-serving-cert\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.367654 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2lm5\" (UniqueName: \"kubernetes.io/projected/3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe-kube-api-access-r2lm5\") pod \"route-controller-manager-7f64b9bb4b-wkqdd\" (UID: \"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe\") " pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.368274 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9j6fm\" (UniqueName: \"kubernetes.io/projected/2c8bfe58-21a0-4eaa-bd40-50a7c8da641d-kube-api-access-9j6fm\") pod \"controller-manager-575bd794d-qgs4p\" (UID: \"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d\") " pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.449459 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.457878 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.865674 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd"] Jan 27 19:13:50 crc kubenswrapper[4776]: I0127 19:13:50.906589 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-575bd794d-qgs4p"] Jan 27 19:13:51 crc kubenswrapper[4776]: I0127 19:13:51.470378 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" event={"ID":"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe","Type":"ContainerStarted","Data":"4eef52124122b181b71f5ddbb92c70f3694d6f42288459ea218342be4ced718e"} Jan 27 19:13:51 crc kubenswrapper[4776]: I0127 19:13:51.470933 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:51 crc kubenswrapper[4776]: I0127 19:13:51.470950 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" event={"ID":"3bc5eefb-6d3d-447f-83ea-62a1cdceb8fe","Type":"ContainerStarted","Data":"13205f83b37f1259f0af8e8a87d270201b575975be1f7c9052592393550eb752"} Jan 27 19:13:51 crc kubenswrapper[4776]: I0127 19:13:51.471643 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" event={"ID":"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d","Type":"ContainerStarted","Data":"f1a589eaec212b40608bb83de107c92da108b46b1c61b9799b7a1452296f100b"} Jan 27 19:13:51 crc kubenswrapper[4776]: I0127 19:13:51.471680 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" event={"ID":"2c8bfe58-21a0-4eaa-bd40-50a7c8da641d","Type":"ContainerStarted","Data":"2b6bbff3c827031b8053596475e8e26b48d581921ca8b686497512a8abe69e76"} Jan 27 19:13:51 crc kubenswrapper[4776]: I0127 19:13:51.471914 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:51 crc kubenswrapper[4776]: I0127 19:13:51.476834 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" Jan 27 19:13:51 crc kubenswrapper[4776]: I0127 19:13:51.477584 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" Jan 27 19:13:51 crc kubenswrapper[4776]: I0127 19:13:51.488510 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7f64b9bb4b-wkqdd" podStartSLOduration=3.488490051 podStartE2EDuration="3.488490051s" podCreationTimestamp="2026-01-27 19:13:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:13:51.485973593 +0000 UTC m=+233.991578740" watchObservedRunningTime="2026-01-27 19:13:51.488490051 +0000 UTC m=+233.994095218" Jan 27 
19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.318206 4776 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.318942 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a" gracePeriod=15 Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.319152 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2" gracePeriod=15 Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.319216 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b" gracePeriod=15 Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.319278 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341" gracePeriod=15 Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.319384 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b" gracePeriod=15 Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.342294 4776 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 27 19:13:54 crc kubenswrapper[4776]: E0127 19:13:54.343259 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.343282 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 27 19:13:54 crc kubenswrapper[4776]: E0127 19:13:54.343299 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.343307 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 27 19:13:54 crc kubenswrapper[4776]: E0127 19:13:54.343316 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.343323 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 19:13:54 crc kubenswrapper[4776]: E0127 19:13:54.343335 4776 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.343342 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 27 19:13:54 crc kubenswrapper[4776]: E0127 19:13:54.343353 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.343359 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 19:13:54 crc kubenswrapper[4776]: E0127 19:13:54.343369 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.343376 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 27 19:13:54 crc kubenswrapper[4776]: E0127 19:13:54.343385 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.343393 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.343930 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.343954 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.343971 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.343978 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.343988 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.343996 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.345443 4776 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.346279 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.387655 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-575bd794d-qgs4p" podStartSLOduration=6.387639784 podStartE2EDuration="6.387639784s" podCreationTimestamp="2026-01-27 19:13:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:13:51.535543213 +0000 UTC m=+234.041148360" watchObservedRunningTime="2026-01-27 19:13:54.387639784 +0000 UTC m=+236.893244941" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.388337 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.435444 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.435530 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.435747 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.435820 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.435851 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.435917 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.435949 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.435993 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.536952 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537009 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537041 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537067 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537088 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537125 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537152 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537150 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537181 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537180 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537227 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537262 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537240 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537235 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537151 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.537428 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.546440 4776 generic.go:334] "Generic (PLEG): container finished" podID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" containerID="cf5c1dc1e8166252bfd9d4df113b4dcafc73ec4db5a88161dbf29d1b86832561" exitCode=0 Jan 27 19:13:54 crc 
kubenswrapper[4776]: I0127 19:13:54.546529 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"09b09d3e-80ec-4ad1-a3b0-f011077ceefa","Type":"ContainerDied","Data":"cf5c1dc1e8166252bfd9d4df113b4dcafc73ec4db5a88161dbf29d1b86832561"} Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.547738 4776 status_manager.go:851] "Failed to get status for pod" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.548255 4776 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.548451 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.549643 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.550292 4776 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b" exitCode=0 Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.550320 4776 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b" exitCode=0 Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.550327 4776 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2" exitCode=0 Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.550334 4776 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341" exitCode=2 Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.550368 4776 scope.go:117] "RemoveContainer" containerID="f05abf0b0d5752026dbc6eb1ce5aecb4b86d03b74e302c0e165345187d0cf81c" Jan 27 19:13:54 crc kubenswrapper[4776]: I0127 19:13:54.685195 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:13:54 crc kubenswrapper[4776]: W0127 19:13:54.721739 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-1a30c25e5d876e1c712f48b78a2c2ec701d31054774cc5c4c2be9be32e976a55 WatchSource:0}: Error finding container 1a30c25e5d876e1c712f48b78a2c2ec701d31054774cc5c4c2be9be32e976a55: Status 404 returned error can't find the container with id 1a30c25e5d876e1c712f48b78a2c2ec701d31054774cc5c4c2be9be32e976a55 Jan 27 19:13:54 crc kubenswrapper[4776]: E0127 19:13:54.726335 4776 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.179:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188eac60a7393c5e openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-27 19:13:54.725567582 +0000 UTC m=+237.231172729,LastTimestamp:2026-01-27 19:13:54.725567582 +0000 UTC m=+237.231172729,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.034701 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" containerName="oauth-openshift" containerID="cri-o://50c08e83a45d5e5d43aca451883ac717cb8f6a8f1c5fdfc7ec89c7f3c79f089f" gracePeriod=15 Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.504271 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.504845 4776 status_manager.go:851] "Failed to get status for pod" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dghnj\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.505263 4776 status_manager.go:851] "Failed to get status for pod" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.505459 4776 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.558828 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.561387 4776 generic.go:334] "Generic (PLEG): container finished" podID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" containerID="50c08e83a45d5e5d43aca451883ac717cb8f6a8f1c5fdfc7ec89c7f3c79f089f" exitCode=0 Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.561484 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.561490 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" event={"ID":"5991ed3f-a5bf-4586-918a-b79afd2d8c27","Type":"ContainerDied","Data":"50c08e83a45d5e5d43aca451883ac717cb8f6a8f1c5fdfc7ec89c7f3c79f089f"} Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.561555 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" event={"ID":"5991ed3f-a5bf-4586-918a-b79afd2d8c27","Type":"ContainerDied","Data":"89d2686ec684ef2f04cd6a80a71e420f35801fe54242c0881605bf3fa9aa3f99"} Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.561582 4776 scope.go:117] "RemoveContainer" containerID="50c08e83a45d5e5d43aca451883ac717cb8f6a8f1c5fdfc7ec89c7f3c79f089f" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.562343 4776 status_manager.go:851] "Failed to get status for pod" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dghnj\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.563624 4776 status_manager.go:851] "Failed to get status for pod" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.563711 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"963402e5eebfbae954d62258f17f7c32cb5f52cd63a174959181162a989da9c2"} Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.563756 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"1a30c25e5d876e1c712f48b78a2c2ec701d31054774cc5c4c2be9be32e976a55"} Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.564292 4776 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.564690 4776 status_manager.go:851] "Failed to get status for pod" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dghnj\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.565057 4776 status_manager.go:851] "Failed to get status for pod" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": 
dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.565360 4776 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.586467 4776 scope.go:117] "RemoveContainer" containerID="50c08e83a45d5e5d43aca451883ac717cb8f6a8f1c5fdfc7ec89c7f3c79f089f" Jan 27 19:13:55 crc kubenswrapper[4776]: E0127 19:13:55.587128 4776 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50c08e83a45d5e5d43aca451883ac717cb8f6a8f1c5fdfc7ec89c7f3c79f089f\": container with ID starting with 50c08e83a45d5e5d43aca451883ac717cb8f6a8f1c5fdfc7ec89c7f3c79f089f not found: ID does not exist" containerID="50c08e83a45d5e5d43aca451883ac717cb8f6a8f1c5fdfc7ec89c7f3c79f089f" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.587172 4776 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50c08e83a45d5e5d43aca451883ac717cb8f6a8f1c5fdfc7ec89c7f3c79f089f"} err="failed to get container status \"50c08e83a45d5e5d43aca451883ac717cb8f6a8f1c5fdfc7ec89c7f3c79f089f\": rpc error: code = NotFound desc = could not find container \"50c08e83a45d5e5d43aca451883ac717cb8f6a8f1c5fdfc7ec89c7f3c79f089f\": container with ID starting with 50c08e83a45d5e5d43aca451883ac717cb8f6a8f1c5fdfc7ec89c7f3c79f089f not found: ID does not exist" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652088 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5991ed3f-a5bf-4586-918a-b79afd2d8c27-audit-dir\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652451 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-provider-selection\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652310 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5991ed3f-a5bf-4586-918a-b79afd2d8c27-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652481 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-trusted-ca-bundle\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652531 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-error\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652561 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-audit-policies\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652611 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vk2t\" (UniqueName: \"kubernetes.io/projected/5991ed3f-a5bf-4586-918a-b79afd2d8c27-kube-api-access-5vk2t\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652647 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-service-ca\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652680 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-cliconfig\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652701 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-ocp-branding-template\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652719 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-session\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652742 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-login\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652807 
4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-idp-0-file-data\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652842 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-serving-cert\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.652865 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-router-certs\") pod \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\" (UID: \"5991ed3f-a5bf-4586-918a-b79afd2d8c27\") " Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.653090 4776 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5991ed3f-a5bf-4586-918a-b79afd2d8c27-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.653874 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.654122 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.658375 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.659117 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.659955 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.660219 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.665597 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5991ed3f-a5bf-4586-918a-b79afd2d8c27-kube-api-access-5vk2t" (OuterVolumeSpecName: "kube-api-access-5vk2t") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "kube-api-access-5vk2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.669875 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.670894 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.671859 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.672947 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.673931 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.675160 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "5991ed3f-a5bf-4586-918a-b79afd2d8c27" (UID: "5991ed3f-a5bf-4586-918a-b79afd2d8c27"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.753443 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vk2t\" (UniqueName: \"kubernetes.io/projected/5991ed3f-a5bf-4586-918a-b79afd2d8c27-kube-api-access-5vk2t\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.753473 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.753487 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.753501 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.753514 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.753525 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.753537 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.753550 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.753562 4776 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.753575 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.753586 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.753598 4776 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5991ed3f-a5bf-4586-918a-b79afd2d8c27-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.753610 4776 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5991ed3f-a5bf-4586-918a-b79afd2d8c27-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.873326 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.873757 4776 status_manager.go:851] "Failed to get status for pod" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dghnj\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.873938 4776 status_manager.go:851] "Failed to get status for pod" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.874127 4776 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.876561 4776 status_manager.go:851] "Failed to get status for pod" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dghnj\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.876721 4776 status_manager.go:851] "Failed to get status for pod" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 
38.102.83.179:6443: connect: connection refused" Jan 27 19:13:55 crc kubenswrapper[4776]: I0127 19:13:55.876914 4776 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.057079 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-kubelet-dir\") pod \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\" (UID: \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\") " Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.057235 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-kube-api-access\") pod \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\" (UID: \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\") " Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.057306 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-var-lock\") pod \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\" (UID: \"09b09d3e-80ec-4ad1-a3b0-f011077ceefa\") " Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.057407 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-var-lock" (OuterVolumeSpecName: "var-lock") pod "09b09d3e-80ec-4ad1-a3b0-f011077ceefa" (UID: "09b09d3e-80ec-4ad1-a3b0-f011077ceefa"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.057342 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "09b09d3e-80ec-4ad1-a3b0-f011077ceefa" (UID: "09b09d3e-80ec-4ad1-a3b0-f011077ceefa"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.057696 4776 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-var-lock\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.057730 4776 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.061634 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "09b09d3e-80ec-4ad1-a3b0-f011077ceefa" (UID: "09b09d3e-80ec-4ad1-a3b0-f011077ceefa"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.158746 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/09b09d3e-80ec-4ad1-a3b0-f011077ceefa-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.570584 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"09b09d3e-80ec-4ad1-a3b0-f011077ceefa","Type":"ContainerDied","Data":"dec239c252a4a9d4ceda99dd7a3ff188f71a4658077cba98d6f60d6effb25a17"} Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.571027 4776 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dec239c252a4a9d4ceda99dd7a3ff188f71a4658077cba98d6f60d6effb25a17" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.570632 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.574929 4776 status_manager.go:851] "Failed to get status for pod" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dghnj\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.575150 4776 status_manager.go:851] "Failed to get status for pod" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.575371 4776 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.769854 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.770522 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.771380 4776 status_manager.go:851] "Failed to get status for pod" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dghnj\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.771800 4776 status_manager.go:851] "Failed to get status for pod" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.772035 4776 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.772310 4776 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.865910 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.866053 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.866070 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.866129 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.866148 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.866281 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.866491 4776 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.866518 4776 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:56 crc kubenswrapper[4776]: I0127 19:13:56.866535 4776 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.577259 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.578210 4776 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a" exitCode=0 Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.578286 4776 scope.go:117] "RemoveContainer" containerID="caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.578350 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.600497 4776 scope.go:117] "RemoveContainer" containerID="65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.606414 4776 status_manager.go:851] "Failed to get status for pod" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dghnj\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.606967 4776 status_manager.go:851] "Failed to get status for pod" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.607685 4776 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.608131 4776 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.616392 4776 scope.go:117] "RemoveContainer" containerID="5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.629474 4776 scope.go:117] "RemoveContainer" containerID="235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.649653 4776 scope.go:117] "RemoveContainer" containerID="192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.676686 4776 scope.go:117] "RemoveContainer" containerID="caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.701272 4776 scope.go:117] "RemoveContainer" containerID="caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b" Jan 27 19:13:57 crc kubenswrapper[4776]: E0127 19:13:57.702031 4776 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\": container with ID starting with caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b not found: ID does not exist" containerID="caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.702086 4776 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b"} err="failed to get container status \"caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\": 
rpc error: code = NotFound desc = could not find container \"caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b\": container with ID starting with caf13c1ea075fe1bd24aea8136931bb33257c2aedcca6e6520f35ae801500d0b not found: ID does not exist" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.702121 4776 scope.go:117] "RemoveContainer" containerID="65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b" Jan 27 19:13:57 crc kubenswrapper[4776]: E0127 19:13:57.703782 4776 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\": container with ID starting with 65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b not found: ID does not exist" containerID="65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.703827 4776 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b"} err="failed to get container status \"65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\": rpc error: code = NotFound desc = could not find container \"65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b\": container with ID starting with 65d62a26fe5b4d5ce442af5efb3bae8ed3add9b11f551efd43e7e91265684e1b not found: ID does not exist" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.703855 4776 scope.go:117] "RemoveContainer" containerID="5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2" Jan 27 19:13:57 crc kubenswrapper[4776]: E0127 19:13:57.704107 4776 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\": container with ID starting with 5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2 not found: ID does not exist" containerID="5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.704135 4776 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2"} err="failed to get container status \"5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\": rpc error: code = NotFound desc = could not find container \"5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2\": container with ID starting with 5719b4aef16d41c92579eba75c029b0b6f1248213f7ca71775849ba1477d19d2 not found: ID does not exist" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.704151 4776 scope.go:117] "RemoveContainer" containerID="235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341" Jan 27 19:13:57 crc kubenswrapper[4776]: E0127 19:13:57.704466 4776 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\": container with ID starting with 235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341 not found: ID does not exist" containerID="235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.704495 4776 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341"} err="failed to get container status \"235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\": rpc error: code = NotFound desc = could not find container \"235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341\": container with ID starting with 235c81eb3ca9ea74b92de20dbe91d7e1d24b3076bf721a873a8b572453fe7341 not found: ID does not exist" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.704529 4776 scope.go:117] "RemoveContainer" containerID="192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a" Jan 27 19:13:57 crc kubenswrapper[4776]: E0127 19:13:57.704787 4776 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\": container with ID starting with 192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a not found: ID does not exist" containerID="192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.704825 4776 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a"} err="failed to get container status \"192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\": rpc error: code = NotFound desc = could not find container \"192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a\": container with ID starting with 192152e5285f04e562b6341e83900eaaba38babed048c2bff0a9e59aa1dc9c7a not found: ID does not exist" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.704849 4776 scope.go:117] "RemoveContainer" containerID="caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1" Jan 27 19:13:57 crc kubenswrapper[4776]: E0127 19:13:57.705492 4776 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\": container with ID starting with caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1 not found: ID does not exist" containerID="caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1" Jan 27 19:13:57 crc kubenswrapper[4776]: I0127 19:13:57.705511 4776 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1"} err="failed to get container status \"caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\": rpc error: code = NotFound desc = could not find container \"caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1\": container with ID starting with caa2309d2cbf6bfc23e2415a852cee64acc6d164da8d63439198997cdbf247c1 not found: ID does not exist" Jan 27 19:13:58 crc kubenswrapper[4776]: E0127 19:13:58.143330 4776 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:58 crc kubenswrapper[4776]: E0127 19:13:58.144052 4776 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:58 crc 
kubenswrapper[4776]: E0127 19:13:58.144486 4776 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:58 crc kubenswrapper[4776]: E0127 19:13:58.144756 4776 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:58 crc kubenswrapper[4776]: E0127 19:13:58.144998 4776 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:58 crc kubenswrapper[4776]: I0127 19:13:58.145023 4776 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 27 19:13:58 crc kubenswrapper[4776]: E0127 19:13:58.145261 4776 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" interval="200ms" Jan 27 19:13:58 crc kubenswrapper[4776]: I0127 19:13:58.326050 4776 status_manager.go:851] "Failed to get status for pod" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dghnj\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:58 crc kubenswrapper[4776]: I0127 19:13:58.326492 4776 status_manager.go:851] "Failed to get status for pod" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:58 crc kubenswrapper[4776]: I0127 19:13:58.326708 4776 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:58 crc kubenswrapper[4776]: I0127 19:13:58.326913 4776 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:13:58 crc kubenswrapper[4776]: I0127 19:13:58.334721 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 27 19:13:58 crc kubenswrapper[4776]: E0127 19:13:58.347021 4776 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" 
interval="400ms" Jan 27 19:13:58 crc kubenswrapper[4776]: E0127 19:13:58.748561 4776 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" interval="800ms" Jan 27 19:13:59 crc kubenswrapper[4776]: E0127 19:13:59.550656 4776 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" interval="1.6s" Jan 27 19:14:01 crc kubenswrapper[4776]: E0127 19:14:01.151551 4776 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" interval="3.2s" Jan 27 19:14:01 crc kubenswrapper[4776]: E0127 19:14:01.333878 4776 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.179:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188eac60a7393c5e openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-27 19:13:54.725567582 +0000 UTC m=+237.231172729,LastTimestamp:2026-01-27 19:13:54.725567582 +0000 UTC m=+237.231172729,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 27 19:14:04 crc kubenswrapper[4776]: E0127 19:14:04.353207 4776 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.179:6443: connect: connection refused" interval="6.4s" Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.319143 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.320053 4776 status_manager.go:851] "Failed to get status for pod" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.320626 4776 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.321368 4776 status_manager.go:851] "Failed to get status for pod" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dghnj\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.344357 4776 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bea6ea5-d676-42cb-83ea-9a2c6cab54f1" Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.344401 4776 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bea6ea5-d676-42cb-83ea-9a2c6cab54f1" Jan 27 19:14:06 crc kubenswrapper[4776]: E0127 19:14:06.345030 4776 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.345850 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:14:06 crc kubenswrapper[4776]: W0127 19:14:06.377296 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-2f7ccab5771b418985fc6afb53f5a7147a00df46eaeca839a2f816357cc7afbb WatchSource:0}: Error finding container 2f7ccab5771b418985fc6afb53f5a7147a00df46eaeca839a2f816357cc7afbb: Status 404 returned error can't find the container with id 2f7ccab5771b418985fc6afb53f5a7147a00df46eaeca839a2f816357cc7afbb Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.627544 4776 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="ddf9820d04b5d2565c259feae0bd5d03a1ffaf80f86a0c916a332446375a11f6" exitCode=0 Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.627599 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"ddf9820d04b5d2565c259feae0bd5d03a1ffaf80f86a0c916a332446375a11f6"} Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.627922 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2f7ccab5771b418985fc6afb53f5a7147a00df46eaeca839a2f816357cc7afbb"} Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.628163 4776 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bea6ea5-d676-42cb-83ea-9a2c6cab54f1" Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.628178 4776 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bea6ea5-d676-42cb-83ea-9a2c6cab54f1" Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.628477 4776 status_manager.go:851] "Failed to get status for pod" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:14:06 crc kubenswrapper[4776]: E0127 19:14:06.628609 4776 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.628738 4776 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:14:06 crc kubenswrapper[4776]: I0127 19:14:06.628980 4776 status_manager.go:851] "Failed to get status for pod" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" pod="openshift-authentication/oauth-openshift-558db77b4-dghnj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-dghnj\": dial tcp 38.102.83.179:6443: connect: connection refused" Jan 27 19:14:07 crc kubenswrapper[4776]: I0127 19:14:07.640034 4776 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c97ef732abc7370e2e8b0851a3864b781ec23d4b4e665a4e62bc77a4365c761c"} Jan 27 19:14:07 crc kubenswrapper[4776]: I0127 19:14:07.640431 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3746cae9e5edfd378c9299e069f375a2195b0f23ab5b1410a5eaccceb1605b08"} Jan 27 19:14:07 crc kubenswrapper[4776]: I0127 19:14:07.640448 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"90af8d03d792f20ef5980b8913a13c414d2318d414ca2969fa83da564dcd8f1d"} Jan 27 19:14:07 crc kubenswrapper[4776]: I0127 19:14:07.640461 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0cfa64bb3db0ac77b13675c08e99e7b173481a1a1c6871e999b2c04363b0f297"} Jan 27 19:14:08 crc kubenswrapper[4776]: I0127 19:14:08.650344 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"07ae8e1db70b58b521b8bc7f667fe85791789bbb7ae0acdd7eafc45f4156ffb5"} Jan 27 19:14:08 crc kubenswrapper[4776]: I0127 19:14:08.650545 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:14:08 crc kubenswrapper[4776]: I0127 19:14:08.650651 4776 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bea6ea5-d676-42cb-83ea-9a2c6cab54f1" Jan 27 19:14:08 crc kubenswrapper[4776]: I0127 19:14:08.650679 4776 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bea6ea5-d676-42cb-83ea-9a2c6cab54f1" Jan 27 19:14:09 crc kubenswrapper[4776]: I0127 19:14:09.661642 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 27 19:14:09 crc kubenswrapper[4776]: I0127 19:14:09.661714 4776 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff" exitCode=1 Jan 27 19:14:09 crc kubenswrapper[4776]: I0127 19:14:09.661752 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff"} Jan 27 19:14:09 crc kubenswrapper[4776]: I0127 19:14:09.662260 4776 scope.go:117] "RemoveContainer" containerID="81c30093246ca8498b374dedbf6b85645debd27e5e42fc68217d07d361ee11ff" Jan 27 19:14:10 crc kubenswrapper[4776]: I0127 19:14:10.670816 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 27 19:14:10 crc kubenswrapper[4776]: I0127 19:14:10.671119 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6441d12e592c2278e6824fbfca90cc6e2251439a4aa4011755082a76eee83335"} Jan 27 19:14:11 crc kubenswrapper[4776]: I0127 19:14:11.346395 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:14:11 crc kubenswrapper[4776]: I0127 19:14:11.346491 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:14:11 crc kubenswrapper[4776]: I0127 19:14:11.355450 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:14:11 crc kubenswrapper[4776]: I0127 19:14:11.812450 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:14:13 crc kubenswrapper[4776]: I0127 19:14:13.658583 4776 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:14:13 crc kubenswrapper[4776]: I0127 19:14:13.696601 4776 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bea6ea5-d676-42cb-83ea-9a2c6cab54f1" Jan 27 19:14:13 crc kubenswrapper[4776]: I0127 19:14:13.696628 4776 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bea6ea5-d676-42cb-83ea-9a2c6cab54f1" Jan 27 19:14:13 crc kubenswrapper[4776]: I0127 19:14:13.701758 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:14:13 crc kubenswrapper[4776]: I0127 19:14:13.703951 4776 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="2783da63-3eb6-4d72-8760-1eb3bf66d09d" Jan 27 19:14:14 crc kubenswrapper[4776]: I0127 19:14:14.700709 4776 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bea6ea5-d676-42cb-83ea-9a2c6cab54f1" Jan 27 19:14:14 crc kubenswrapper[4776]: I0127 19:14:14.701001 4776 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="5bea6ea5-d676-42cb-83ea-9a2c6cab54f1" Jan 27 19:14:17 crc kubenswrapper[4776]: I0127 19:14:17.122132 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:14:17 crc kubenswrapper[4776]: I0127 19:14:17.127360 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:14:18 crc kubenswrapper[4776]: I0127 19:14:18.328886 4776 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="2783da63-3eb6-4d72-8760-1eb3bf66d09d" Jan 27 19:14:19 crc kubenswrapper[4776]: I0127 19:14:19.836308 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 27 19:14:21 crc kubenswrapper[4776]: I0127 19:14:21.343382 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 27 19:14:21 crc kubenswrapper[4776]: I0127 19:14:21.817112 
4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 27 19:14:23 crc kubenswrapper[4776]: I0127 19:14:23.525726 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 27 19:14:23 crc kubenswrapper[4776]: I0127 19:14:23.680868 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 27 19:14:24 crc kubenswrapper[4776]: I0127 19:14:24.455517 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 27 19:14:24 crc kubenswrapper[4776]: I0127 19:14:24.894884 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 27 19:14:25 crc kubenswrapper[4776]: I0127 19:14:25.087300 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 27 19:14:25 crc kubenswrapper[4776]: I0127 19:14:25.147968 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 27 19:14:25 crc kubenswrapper[4776]: I0127 19:14:25.294210 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 27 19:14:25 crc kubenswrapper[4776]: I0127 19:14:25.417350 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 27 19:14:26 crc kubenswrapper[4776]: I0127 19:14:26.305302 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 27 19:14:26 crc kubenswrapper[4776]: I0127 19:14:26.535287 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 27 19:14:26 crc kubenswrapper[4776]: I0127 19:14:26.538903 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 27 19:14:26 crc kubenswrapper[4776]: I0127 19:14:26.866992 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 27 19:14:26 crc kubenswrapper[4776]: I0127 19:14:26.942652 4776 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 27 19:14:27 crc kubenswrapper[4776]: I0127 19:14:27.249061 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 27 19:14:27 crc kubenswrapper[4776]: I0127 19:14:27.311760 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 27 19:14:27 crc kubenswrapper[4776]: I0127 19:14:27.457640 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 27 19:14:27 crc kubenswrapper[4776]: I0127 19:14:27.550323 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 27 19:14:27 crc kubenswrapper[4776]: I0127 19:14:27.676657 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 27 19:14:28 
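
The PLEG entries at the start of this section (kubelet.go:2453, "SyncLoop (PLEG): event for pod", with ContainerStarted/ContainerDied payloads for kube-apiserver-crc and kube-controller-manager-crc) can be tallied mechanically. Below is a minimal Go sketch for that; it assumes the kubelet log is available as a plain-text file with one journal entry per line and that its path is passed as the first argument — the file name, and everything except the field layout copied from the lines above, is illustrative.

// pleg_events.go: tally "SyncLoop (PLEG)" events per pod from a kubelet log.
// Sketch only: the path handling and one-entry-per-line layout are assumptions.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	if len(os.Args) < 2 {
		fmt.Fprintln(os.Stderr, "usage: pleg_events <kubelet-log-file>")
		os.Exit(1)
	}
	f, err := os.Open(os.Args[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer f.Close()

	// Matches e.g.:
	//   ... kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="ns/name" event={"ID":"...","Type":"ContainerStarted","Data":"..."}
	re := regexp.MustCompile(`"SyncLoop \(PLEG\): event for pod" pod="([^"]+)" event=\{.*?"Type":"([^"]+)"`)

	counts := map[string]map[string]int{} // pod -> event type -> count
	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // journal lines can be long
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			pod, typ := m[1], m[2]
			if counts[pod] == nil {
				counts[pod] = map[string]int{}
			}
			counts[pod][typ]++
		}
	}
	for pod, byType := range counts {
		fmt.Println(pod, byType)
	}
}

Run as, for example: go run pleg_events.go /path/to/kubelet.log
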
crc kubenswrapper[4776]: I0127 19:14:28.039915 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 27 19:14:28 crc kubenswrapper[4776]: I0127 19:14:28.086046 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 27 19:14:28 crc kubenswrapper[4776]: I0127 19:14:28.094981 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 27 19:14:28 crc kubenswrapper[4776]: I0127 19:14:28.118603 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 27 19:14:28 crc kubenswrapper[4776]: I0127 19:14:28.272263 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 27 19:14:28 crc kubenswrapper[4776]: I0127 19:14:28.284594 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 27 19:14:28 crc kubenswrapper[4776]: I0127 19:14:28.447501 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 27 19:14:28 crc kubenswrapper[4776]: I0127 19:14:28.489918 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 27 19:14:28 crc kubenswrapper[4776]: I0127 19:14:28.525078 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 27 19:14:28 crc kubenswrapper[4776]: I0127 19:14:28.623632 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 27 19:14:28 crc kubenswrapper[4776]: I0127 19:14:28.688435 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 27 19:14:28 crc kubenswrapper[4776]: I0127 19:14:28.740507 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 27 19:14:28 crc kubenswrapper[4776]: I0127 19:14:28.991702 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 27 19:14:28 crc kubenswrapper[4776]: I0127 19:14:28.995269 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.015172 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.045278 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.092127 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.298050 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.382908 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 
19:14:29.404192 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.533956 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.556263 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.712943 4776 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.713937 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.717639 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.718607 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.739452 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.767300 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.796337 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 27 19:14:29 crc kubenswrapper[4776]: I0127 19:14:29.893031 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 27 19:14:30 crc kubenswrapper[4776]: I0127 19:14:30.026969 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 27 19:14:30 crc kubenswrapper[4776]: I0127 19:14:30.108216 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 27 19:14:30 crc kubenswrapper[4776]: I0127 19:14:30.194806 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 27 19:14:30 crc kubenswrapper[4776]: I0127 19:14:30.263362 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 27 19:14:30 crc kubenswrapper[4776]: I0127 19:14:30.270225 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 27 19:14:30 crc kubenswrapper[4776]: I0127 19:14:30.314376 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 27 19:14:30 crc kubenswrapper[4776]: I0127 19:14:30.329587 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 27 19:14:30 crc kubenswrapper[4776]: I0127 19:14:30.348488 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 27 19:14:30 crc 
kubenswrapper[4776]: I0127 19:14:30.446008 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 27 19:14:30 crc kubenswrapper[4776]: I0127 19:14:30.492333 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 27 19:14:30 crc kubenswrapper[4776]: I0127 19:14:30.661295 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 27 19:14:30 crc kubenswrapper[4776]: I0127 19:14:30.722724 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 27 19:14:30 crc kubenswrapper[4776]: I0127 19:14:30.798810 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 27 19:14:30 crc kubenswrapper[4776]: I0127 19:14:30.988455 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.013107 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.058696 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.107943 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.263733 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.287872 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.352101 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.371642 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.417349 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.433360 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.466415 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.474749 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.503119 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.618988 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.630756 4776 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.691750 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.750458 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.760587 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.776435 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.780891 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.782813 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.873272 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.928981 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.957164 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 27 19:14:31 crc kubenswrapper[4776]: I0127 19:14:31.987125 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.014250 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.015562 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.030363 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.038076 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.047801 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.058898 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.097355 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.105299 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.129701 4776 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.181827 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.258164 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.315651 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.317234 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.384041 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.538644 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.541655 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.601209 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.635461 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.684253 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.690602 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.766745 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.825185 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.831238 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.881796 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.898258 4776 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.902342 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=38.90232161 podStartE2EDuration="38.90232161s" podCreationTimestamp="2026-01-27 19:13:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:14:13.42682911 +0000 UTC 
m=+255.932434267" watchObservedRunningTime="2026-01-27 19:14:32.90232161 +0000 UTC m=+275.407926757" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.903452 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-dghnj"] Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.903506 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.908834 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.925532 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=19.925509789 podStartE2EDuration="19.925509789s" podCreationTimestamp="2026-01-27 19:14:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:14:32.92138395 +0000 UTC m=+275.426989107" watchObservedRunningTime="2026-01-27 19:14:32.925509789 +0000 UTC m=+275.431114946" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.933351 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 27 19:14:32 crc kubenswrapper[4776]: I0127 19:14:32.955540 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.060515 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.076084 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.101077 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.109609 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.201371 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.204924 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.205963 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.220921 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.318555 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.345428 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 
19:14:33.362729 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.406742 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.459903 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.532878 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.602048 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.646168 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.676186 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.689999 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.694729 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.755243 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.882597 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.900194 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 27 19:14:33 crc kubenswrapper[4776]: I0127 19:14:33.900246 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 27 19:14:34 crc kubenswrapper[4776]: I0127 19:14:34.110102 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 27 19:14:34 crc kubenswrapper[4776]: I0127 19:14:34.185412 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 27 19:14:34 crc kubenswrapper[4776]: I0127 19:14:34.189059 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 27 19:14:34 crc kubenswrapper[4776]: I0127 19:14:34.331951 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" path="/var/lib/kubelet/pods/5991ed3f-a5bf-4586-918a-b79afd2d8c27/volumes" Jan 27 19:14:34 crc kubenswrapper[4776]: I0127 19:14:34.400501 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 27 19:14:34 crc kubenswrapper[4776]: I0127 19:14:34.417632 4776 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 27 19:14:34 crc 
kubenswrapper[4776]: I0127 19:14:34.438751 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 27 19:14:34 crc kubenswrapper[4776]: I0127 19:14:34.448252 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 27 19:14:34 crc kubenswrapper[4776]: I0127 19:14:34.502944 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 27 19:14:34 crc kubenswrapper[4776]: I0127 19:14:34.696656 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 27 19:14:34 crc kubenswrapper[4776]: I0127 19:14:34.759542 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 27 19:14:34 crc kubenswrapper[4776]: I0127 19:14:34.855507 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 27 19:14:34 crc kubenswrapper[4776]: I0127 19:14:34.975655 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.014122 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.079821 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.133050 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.142366 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.179618 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.196553 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.214572 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.272887 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.314157 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.325506 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.329194 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 27 19:14:35 
crc kubenswrapper[4776]: I0127 19:14:35.353028 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.355396 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.412052 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.542514 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.648876 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-d9df679ff-xt7z9"] Jan 27 19:14:35 crc kubenswrapper[4776]: E0127 19:14:35.649128 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" containerName="oauth-openshift" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.649146 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" containerName="oauth-openshift" Jan 27 19:14:35 crc kubenswrapper[4776]: E0127 19:14:35.649166 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" containerName="installer" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.649175 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" containerName="installer" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.649301 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="09b09d3e-80ec-4ad1-a3b0-f011077ceefa" containerName="installer" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.649327 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="5991ed3f-a5bf-4586-918a-b79afd2d8c27" containerName="oauth-openshift" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.649799 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.653092 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.653460 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.654436 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.654591 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.654627 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.655244 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.656650 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.657009 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.657192 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.657325 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.657857 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.657987 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.661320 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.675541 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.675990 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.684369 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.714686 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.740673 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.770214 4776 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.778643 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.779733 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.834316 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.850268 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-session\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.850336 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-service-ca\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.850391 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.850491 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-user-template-error\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.850549 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.850601 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vww7\" (UniqueName: \"kubernetes.io/projected/3de7e918-52df-4626-bb13-ba69b0d06db2-kube-api-access-7vww7\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.850641 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.850681 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3de7e918-52df-4626-bb13-ba69b0d06db2-audit-dir\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.850718 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-user-template-login\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.850835 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3de7e918-52df-4626-bb13-ba69b0d06db2-audit-policies\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.850930 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.851036 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.851105 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-router-certs\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.851145 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.872529 
4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.894641 4776 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.895000 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://963402e5eebfbae954d62258f17f7c32cb5f52cd63a174959181162a989da9c2" gracePeriod=5 Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.952134 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-session\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.952198 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-service-ca\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.952242 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.952343 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-user-template-error\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.952411 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.952610 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vww7\" (UniqueName: \"kubernetes.io/projected/3de7e918-52df-4626-bb13-ba69b0d06db2-kube-api-access-7vww7\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.952714 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.952818 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3de7e918-52df-4626-bb13-ba69b0d06db2-audit-dir\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.952880 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-user-template-login\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.952956 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3de7e918-52df-4626-bb13-ba69b0d06db2-audit-policies\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.952960 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3de7e918-52df-4626-bb13-ba69b0d06db2-audit-dir\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.953003 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.953066 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.953146 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-router-certs\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.953195 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-user-template-provider-selection\") pod 
\"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.955312 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.957599 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3de7e918-52df-4626-bb13-ba69b0d06db2-audit-policies\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.963437 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.963460 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-service-ca\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.964054 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-session\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.965995 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.966157 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.973343 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-user-template-error\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " 
pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.974276 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-router-certs\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.975076 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-user-template-login\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.975902 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.978231 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vww7\" (UniqueName: \"kubernetes.io/projected/3de7e918-52df-4626-bb13-ba69b0d06db2-kube-api-access-7vww7\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.980204 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3de7e918-52df-4626-bb13-ba69b0d06db2-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d9df679ff-xt7z9\" (UID: \"3de7e918-52df-4626-bb13-ba69b0d06db2\") " pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:35 crc kubenswrapper[4776]: I0127 19:14:35.990515 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.024096 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.072679 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.110620 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.267116 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.321424 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.342143 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.484297 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.540609 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.570639 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.574885 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.646560 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.686818 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.789538 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.818069 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.818593 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.821169 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.898289 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 27 19:14:36 crc kubenswrapper[4776]: I0127 19:14:36.936757 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.004688 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.156204 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.188123 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.205269 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 27 19:14:37 crc 
kubenswrapper[4776]: I0127 19:14:37.236125 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.264646 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.299281 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.323812 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.335542 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.522492 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.544375 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.582699 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.584820 4776 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.707557 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 27 19:14:37 crc kubenswrapper[4776]: I0127 19:14:37.882416 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 27 19:14:38 crc kubenswrapper[4776]: I0127 19:14:38.024277 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 27 19:14:38 crc kubenswrapper[4776]: I0127 19:14:38.057354 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 27 19:14:38 crc kubenswrapper[4776]: I0127 19:14:38.096934 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 27 19:14:38 crc kubenswrapper[4776]: I0127 19:14:38.219155 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 27 19:14:38 crc kubenswrapper[4776]: I0127 19:14:38.280796 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 27 19:14:38 crc kubenswrapper[4776]: I0127 19:14:38.330907 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 27 19:14:38 crc kubenswrapper[4776]: I0127 19:14:38.421225 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 27 19:14:38 crc kubenswrapper[4776]: I0127 19:14:38.439991 4776 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 27 19:14:38 crc kubenswrapper[4776]: I0127 19:14:38.473018 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 27 19:14:38 crc kubenswrapper[4776]: I0127 19:14:38.753595 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 27 19:14:38 crc kubenswrapper[4776]: I0127 19:14:38.755214 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 27 19:14:38 crc kubenswrapper[4776]: I0127 19:14:38.846871 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 27 19:14:39 crc kubenswrapper[4776]: I0127 19:14:39.024438 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 27 19:14:39 crc kubenswrapper[4776]: I0127 19:14:39.131029 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 27 19:14:39 crc kubenswrapper[4776]: I0127 19:14:39.316636 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 27 19:14:39 crc kubenswrapper[4776]: I0127 19:14:39.563222 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 27 19:14:39 crc kubenswrapper[4776]: I0127 19:14:39.752451 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 27 19:14:39 crc kubenswrapper[4776]: I0127 19:14:39.775957 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 27 19:14:39 crc kubenswrapper[4776]: I0127 19:14:39.967696 4776 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 27 19:14:40 crc kubenswrapper[4776]: I0127 19:14:40.101831 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-d9df679ff-xt7z9"] Jan 27 19:14:40 crc kubenswrapper[4776]: I0127 19:14:40.351501 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 27 19:14:40 crc kubenswrapper[4776]: I0127 19:14:40.371090 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 27 19:14:40 crc kubenswrapper[4776]: I0127 19:14:40.396421 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 27 19:14:40 crc kubenswrapper[4776]: I0127 19:14:40.469887 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 27 19:14:40 crc kubenswrapper[4776]: I0127 19:14:40.481004 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-d9df679ff-xt7z9"] Jan 27 19:14:40 crc kubenswrapper[4776]: I0127 19:14:40.526269 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 27 19:14:40 crc kubenswrapper[4776]: I0127 19:14:40.847104 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" 
event={"ID":"3de7e918-52df-4626-bb13-ba69b0d06db2","Type":"ContainerStarted","Data":"3f90350e6f55b82a5c43a5f84859bf26b1b315f45ea9359c34ca20005f73d766"} Jan 27 19:14:40 crc kubenswrapper[4776]: I0127 19:14:40.874471 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.028515 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.028581 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.224937 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.225039 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.225066 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.225123 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.225152 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.225190 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.225225 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.225263 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.225357 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.225490 4776 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.225516 4776 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.225529 4776 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.225542 4776 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.234462 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.326530 4776 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.375288 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.559481 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.855661 4776 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.855720 4776 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="963402e5eebfbae954d62258f17f7c32cb5f52cd63a174959181162a989da9c2" exitCode=137 Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.855826 4776 scope.go:117] "RemoveContainer" containerID="963402e5eebfbae954d62258f17f7c32cb5f52cd63a174959181162a989da9c2" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.855898 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.858325 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" event={"ID":"3de7e918-52df-4626-bb13-ba69b0d06db2","Type":"ContainerStarted","Data":"30a9d5c95964ca0ebeb89a8f8c3be5b5330534d70713e48054e6c4db61f07fa2"} Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.858589 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.865117 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.879267 4776 scope.go:117] "RemoveContainer" containerID="963402e5eebfbae954d62258f17f7c32cb5f52cd63a174959181162a989da9c2" Jan 27 19:14:41 crc kubenswrapper[4776]: E0127 19:14:41.879741 4776 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"963402e5eebfbae954d62258f17f7c32cb5f52cd63a174959181162a989da9c2\": container with ID starting with 963402e5eebfbae954d62258f17f7c32cb5f52cd63a174959181162a989da9c2 not found: ID does not exist" containerID="963402e5eebfbae954d62258f17f7c32cb5f52cd63a174959181162a989da9c2" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.879801 4776 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"963402e5eebfbae954d62258f17f7c32cb5f52cd63a174959181162a989da9c2"} err="failed to get container status \"963402e5eebfbae954d62258f17f7c32cb5f52cd63a174959181162a989da9c2\": rpc error: code = NotFound desc = could not find container \"963402e5eebfbae954d62258f17f7c32cb5f52cd63a174959181162a989da9c2\": container with ID starting with 963402e5eebfbae954d62258f17f7c32cb5f52cd63a174959181162a989da9c2 not found: ID does not exist" Jan 27 
19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.893661 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-d9df679ff-xt7z9" podStartSLOduration=71.893625648 podStartE2EDuration="1m11.893625648s" podCreationTimestamp="2026-01-27 19:13:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:14:41.887115373 +0000 UTC m=+284.392720550" watchObservedRunningTime="2026-01-27 19:14:41.893625648 +0000 UTC m=+284.399230835" Jan 27 19:14:41 crc kubenswrapper[4776]: I0127 19:14:41.921358 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 27 19:14:42 crc kubenswrapper[4776]: I0127 19:14:42.327538 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 27 19:14:42 crc kubenswrapper[4776]: I0127 19:14:42.328131 4776 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Jan 27 19:14:42 crc kubenswrapper[4776]: I0127 19:14:42.334751 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 27 19:14:42 crc kubenswrapper[4776]: I0127 19:14:42.338751 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 27 19:14:42 crc kubenswrapper[4776]: I0127 19:14:42.338831 4776 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="42a94bca-a7d1-4cf7-b6bc-80510a2c6b1b" Jan 27 19:14:42 crc kubenswrapper[4776]: I0127 19:14:42.343599 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 27 19:14:42 crc kubenswrapper[4776]: I0127 19:14:42.343644 4776 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="42a94bca-a7d1-4cf7-b6bc-80510a2c6b1b" Jan 27 19:14:43 crc kubenswrapper[4776]: I0127 19:14:43.161963 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 27 19:14:43 crc kubenswrapper[4776]: I0127 19:14:43.556118 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 27 19:14:58 crc kubenswrapper[4776]: I0127 19:14:58.062378 4776 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.164093 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm"] Jan 27 19:15:00 crc kubenswrapper[4776]: E0127 19:15:00.164634 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.164648 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.164782 4776 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.165282 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.167133 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.167179 4776 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.171464 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0e67329c-0987-4706-84cc-a423956eaf85-config-volume\") pod \"collect-profiles-29492355-bqnsm\" (UID: \"0e67329c-0987-4706-84cc-a423956eaf85\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.171684 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kq5dz\" (UniqueName: \"kubernetes.io/projected/0e67329c-0987-4706-84cc-a423956eaf85-kube-api-access-kq5dz\") pod \"collect-profiles-29492355-bqnsm\" (UID: \"0e67329c-0987-4706-84cc-a423956eaf85\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.171783 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0e67329c-0987-4706-84cc-a423956eaf85-secret-volume\") pod \"collect-profiles-29492355-bqnsm\" (UID: \"0e67329c-0987-4706-84cc-a423956eaf85\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.173923 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm"] Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.272907 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0e67329c-0987-4706-84cc-a423956eaf85-config-volume\") pod \"collect-profiles-29492355-bqnsm\" (UID: \"0e67329c-0987-4706-84cc-a423956eaf85\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.272962 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kq5dz\" (UniqueName: \"kubernetes.io/projected/0e67329c-0987-4706-84cc-a423956eaf85-kube-api-access-kq5dz\") pod \"collect-profiles-29492355-bqnsm\" (UID: \"0e67329c-0987-4706-84cc-a423956eaf85\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.272987 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0e67329c-0987-4706-84cc-a423956eaf85-secret-volume\") pod \"collect-profiles-29492355-bqnsm\" (UID: \"0e67329c-0987-4706-84cc-a423956eaf85\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.274032 4776 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0e67329c-0987-4706-84cc-a423956eaf85-config-volume\") pod \"collect-profiles-29492355-bqnsm\" (UID: \"0e67329c-0987-4706-84cc-a423956eaf85\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.278694 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0e67329c-0987-4706-84cc-a423956eaf85-secret-volume\") pod \"collect-profiles-29492355-bqnsm\" (UID: \"0e67329c-0987-4706-84cc-a423956eaf85\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.296915 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kq5dz\" (UniqueName: \"kubernetes.io/projected/0e67329c-0987-4706-84cc-a423956eaf85-kube-api-access-kq5dz\") pod \"collect-profiles-29492355-bqnsm\" (UID: \"0e67329c-0987-4706-84cc-a423956eaf85\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.486046 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.902444 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm"] Jan 27 19:15:00 crc kubenswrapper[4776]: W0127 19:15:00.910437 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0e67329c_0987_4706_84cc_a423956eaf85.slice/crio-bbe55204e5acb6257c47f570cfc505b4927b25ea6fa884da3a3864e9e201ccf1 WatchSource:0}: Error finding container bbe55204e5acb6257c47f570cfc505b4927b25ea6fa884da3a3864e9e201ccf1: Status 404 returned error can't find the container with id bbe55204e5acb6257c47f570cfc505b4927b25ea6fa884da3a3864e9e201ccf1 Jan 27 19:15:00 crc kubenswrapper[4776]: I0127 19:15:00.977768 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" event={"ID":"0e67329c-0987-4706-84cc-a423956eaf85","Type":"ContainerStarted","Data":"bbe55204e5acb6257c47f570cfc505b4927b25ea6fa884da3a3864e9e201ccf1"} Jan 27 19:15:01 crc kubenswrapper[4776]: I0127 19:15:01.984388 4776 generic.go:334] "Generic (PLEG): container finished" podID="0e67329c-0987-4706-84cc-a423956eaf85" containerID="86b12689cffbffabd982fad7f0fa6cc88fd1f7011c407eea2028bde7a4bb82db" exitCode=0 Jan 27 19:15:01 crc kubenswrapper[4776]: I0127 19:15:01.984467 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" event={"ID":"0e67329c-0987-4706-84cc-a423956eaf85","Type":"ContainerDied","Data":"86b12689cffbffabd982fad7f0fa6cc88fd1f7011c407eea2028bde7a4bb82db"} Jan 27 19:15:03 crc kubenswrapper[4776]: I0127 19:15:03.254341 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" Jan 27 19:15:03 crc kubenswrapper[4776]: I0127 19:15:03.411668 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kq5dz\" (UniqueName: \"kubernetes.io/projected/0e67329c-0987-4706-84cc-a423956eaf85-kube-api-access-kq5dz\") pod \"0e67329c-0987-4706-84cc-a423956eaf85\" (UID: \"0e67329c-0987-4706-84cc-a423956eaf85\") " Jan 27 19:15:03 crc kubenswrapper[4776]: I0127 19:15:03.411767 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0e67329c-0987-4706-84cc-a423956eaf85-secret-volume\") pod \"0e67329c-0987-4706-84cc-a423956eaf85\" (UID: \"0e67329c-0987-4706-84cc-a423956eaf85\") " Jan 27 19:15:03 crc kubenswrapper[4776]: I0127 19:15:03.411812 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0e67329c-0987-4706-84cc-a423956eaf85-config-volume\") pod \"0e67329c-0987-4706-84cc-a423956eaf85\" (UID: \"0e67329c-0987-4706-84cc-a423956eaf85\") " Jan 27 19:15:03 crc kubenswrapper[4776]: I0127 19:15:03.412507 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e67329c-0987-4706-84cc-a423956eaf85-config-volume" (OuterVolumeSpecName: "config-volume") pod "0e67329c-0987-4706-84cc-a423956eaf85" (UID: "0e67329c-0987-4706-84cc-a423956eaf85"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:15:03 crc kubenswrapper[4776]: I0127 19:15:03.417551 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e67329c-0987-4706-84cc-a423956eaf85-kube-api-access-kq5dz" (OuterVolumeSpecName: "kube-api-access-kq5dz") pod "0e67329c-0987-4706-84cc-a423956eaf85" (UID: "0e67329c-0987-4706-84cc-a423956eaf85"). InnerVolumeSpecName "kube-api-access-kq5dz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:15:03 crc kubenswrapper[4776]: I0127 19:15:03.418252 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e67329c-0987-4706-84cc-a423956eaf85-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0e67329c-0987-4706-84cc-a423956eaf85" (UID: "0e67329c-0987-4706-84cc-a423956eaf85"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:15:03 crc kubenswrapper[4776]: I0127 19:15:03.513818 4776 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0e67329c-0987-4706-84cc-a423956eaf85-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:03 crc kubenswrapper[4776]: I0127 19:15:03.513869 4776 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0e67329c-0987-4706-84cc-a423956eaf85-config-volume\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:03 crc kubenswrapper[4776]: I0127 19:15:03.513890 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kq5dz\" (UniqueName: \"kubernetes.io/projected/0e67329c-0987-4706-84cc-a423956eaf85-kube-api-access-kq5dz\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:03 crc kubenswrapper[4776]: I0127 19:15:03.996169 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" event={"ID":"0e67329c-0987-4706-84cc-a423956eaf85","Type":"ContainerDied","Data":"bbe55204e5acb6257c47f570cfc505b4927b25ea6fa884da3a3864e9e201ccf1"} Jan 27 19:15:03 crc kubenswrapper[4776]: I0127 19:15:03.996216 4776 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bbe55204e5acb6257c47f570cfc505b4927b25ea6fa884da3a3864e9e201ccf1" Jan 27 19:15:03 crc kubenswrapper[4776]: I0127 19:15:03.996269 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29492355-bqnsm" Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.744721 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9m7mm"] Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.745613 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9m7mm" podUID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" containerName="registry-server" containerID="cri-o://4c4179d18b4320602e53d654991d24b1e5e1df201239ce72f2c929af07cec93b" gracePeriod=30 Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.748822 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fqbt9"] Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.749082 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fqbt9" podUID="fde48346-25d2-4251-b8f7-2e333cba2144" containerName="registry-server" containerID="cri-o://ed516c774eb69c4922d73c21daad61bc3ad499299f07a118c45ec20a671421c8" gracePeriod=30 Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.767900 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tqlbs"] Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.768131 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" podUID="6e7febfa-69fa-429e-85be-9d6d54d18760" containerName="marketplace-operator" containerID="cri-o://b05648f338d409859fa38a44ef5ef2a0697d7c526d9e867b0b25054bbf0e1606" gracePeriod=30 Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.774797 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4q55b"] Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.775014 4776 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4q55b" podUID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" containerName="registry-server" containerID="cri-o://35b4e5bbeea3d5dac551288eb161bcad1154405519d85afaf32dae547a15f52b" gracePeriod=30 Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.778937 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-88bn9"] Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.779332 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-88bn9" podUID="5807670f-4076-457a-81d3-218df5584035" containerName="registry-server" containerID="cri-o://6aaf7a2868b904e19ee19f774946bf52671b514d46b61ec02e48556c3f5b0d23" gracePeriod=30 Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.781924 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6t29q"] Jan 27 19:15:17 crc kubenswrapper[4776]: E0127 19:15:17.782130 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e67329c-0987-4706-84cc-a423956eaf85" containerName="collect-profiles" Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.782144 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e67329c-0987-4706-84cc-a423956eaf85" containerName="collect-profiles" Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.782247 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e67329c-0987-4706-84cc-a423956eaf85" containerName="collect-profiles" Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.782590 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.799747 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6t29q"] Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.901258 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/959cc68c-3afe-4256-843c-c1ddb58c52a7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6t29q\" (UID: \"959cc68c-3afe-4256-843c-c1ddb58c52a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.901297 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vnbw\" (UniqueName: \"kubernetes.io/projected/959cc68c-3afe-4256-843c-c1ddb58c52a7-kube-api-access-6vnbw\") pod \"marketplace-operator-79b997595-6t29q\" (UID: \"959cc68c-3afe-4256-843c-c1ddb58c52a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" Jan 27 19:15:17 crc kubenswrapper[4776]: I0127 19:15:17.901343 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/959cc68c-3afe-4256-843c-c1ddb58c52a7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6t29q\" (UID: \"959cc68c-3afe-4256-843c-c1ddb58c52a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" Jan 27 19:15:17 crc kubenswrapper[4776]: E0127 19:15:17.951960 4776 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not 
created or running: checking if PID of 4c4179d18b4320602e53d654991d24b1e5e1df201239ce72f2c929af07cec93b is running failed: container process not found" containerID="4c4179d18b4320602e53d654991d24b1e5e1df201239ce72f2c929af07cec93b" cmd=["grpc_health_probe","-addr=:50051"] Jan 27 19:15:17 crc kubenswrapper[4776]: E0127 19:15:17.952292 4776 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c4179d18b4320602e53d654991d24b1e5e1df201239ce72f2c929af07cec93b is running failed: container process not found" containerID="4c4179d18b4320602e53d654991d24b1e5e1df201239ce72f2c929af07cec93b" cmd=["grpc_health_probe","-addr=:50051"] Jan 27 19:15:17 crc kubenswrapper[4776]: E0127 19:15:17.952631 4776 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c4179d18b4320602e53d654991d24b1e5e1df201239ce72f2c929af07cec93b is running failed: container process not found" containerID="4c4179d18b4320602e53d654991d24b1e5e1df201239ce72f2c929af07cec93b" cmd=["grpc_health_probe","-addr=:50051"] Jan 27 19:15:17 crc kubenswrapper[4776]: E0127 19:15:17.952676 4776 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4c4179d18b4320602e53d654991d24b1e5e1df201239ce72f2c929af07cec93b is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-9m7mm" podUID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" containerName="registry-server" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.002192 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/959cc68c-3afe-4256-843c-c1ddb58c52a7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6t29q\" (UID: \"959cc68c-3afe-4256-843c-c1ddb58c52a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.002284 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/959cc68c-3afe-4256-843c-c1ddb58c52a7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6t29q\" (UID: \"959cc68c-3afe-4256-843c-c1ddb58c52a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.002302 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vnbw\" (UniqueName: \"kubernetes.io/projected/959cc68c-3afe-4256-843c-c1ddb58c52a7-kube-api-access-6vnbw\") pod \"marketplace-operator-79b997595-6t29q\" (UID: \"959cc68c-3afe-4256-843c-c1ddb58c52a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.003512 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/959cc68c-3afe-4256-843c-c1ddb58c52a7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6t29q\" (UID: \"959cc68c-3afe-4256-843c-c1ddb58c52a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.011090 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/959cc68c-3afe-4256-843c-c1ddb58c52a7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6t29q\" (UID: \"959cc68c-3afe-4256-843c-c1ddb58c52a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.018919 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vnbw\" (UniqueName: \"kubernetes.io/projected/959cc68c-3afe-4256-843c-c1ddb58c52a7-kube-api-access-6vnbw\") pod \"marketplace-operator-79b997595-6t29q\" (UID: \"959cc68c-3afe-4256-843c-c1ddb58c52a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.093559 4776 generic.go:334] "Generic (PLEG): container finished" podID="6e7febfa-69fa-429e-85be-9d6d54d18760" containerID="b05648f338d409859fa38a44ef5ef2a0697d7c526d9e867b0b25054bbf0e1606" exitCode=0 Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.093842 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" event={"ID":"6e7febfa-69fa-429e-85be-9d6d54d18760","Type":"ContainerDied","Data":"b05648f338d409859fa38a44ef5ef2a0697d7c526d9e867b0b25054bbf0e1606"} Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.108441 4776 generic.go:334] "Generic (PLEG): container finished" podID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" containerID="4c4179d18b4320602e53d654991d24b1e5e1df201239ce72f2c929af07cec93b" exitCode=0 Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.108490 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9m7mm" event={"ID":"5eedb21e-27c3-4ca4-8a86-4ad640a95f82","Type":"ContainerDied","Data":"4c4179d18b4320602e53d654991d24b1e5e1df201239ce72f2c929af07cec93b"} Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.110859 4776 generic.go:334] "Generic (PLEG): container finished" podID="fde48346-25d2-4251-b8f7-2e333cba2144" containerID="ed516c774eb69c4922d73c21daad61bc3ad499299f07a118c45ec20a671421c8" exitCode=0 Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.110903 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqbt9" event={"ID":"fde48346-25d2-4251-b8f7-2e333cba2144","Type":"ContainerDied","Data":"ed516c774eb69c4922d73c21daad61bc3ad499299f07a118c45ec20a671421c8"} Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.112537 4776 generic.go:334] "Generic (PLEG): container finished" podID="5807670f-4076-457a-81d3-218df5584035" containerID="6aaf7a2868b904e19ee19f774946bf52671b514d46b61ec02e48556c3f5b0d23" exitCode=0 Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.112573 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88bn9" event={"ID":"5807670f-4076-457a-81d3-218df5584035","Type":"ContainerDied","Data":"6aaf7a2868b904e19ee19f774946bf52671b514d46b61ec02e48556c3f5b0d23"} Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.114209 4776 generic.go:334] "Generic (PLEG): container finished" podID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" containerID="35b4e5bbeea3d5dac551288eb161bcad1154405519d85afaf32dae547a15f52b" exitCode=0 Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.114230 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4q55b" 
event={"ID":"5d346b77-e45d-4fb8-be3e-9b82fce112eb","Type":"ContainerDied","Data":"35b4e5bbeea3d5dac551288eb161bcad1154405519d85afaf32dae547a15f52b"} Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.189050 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.195801 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.233889 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.260196 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.307475 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27rjp\" (UniqueName: \"kubernetes.io/projected/fde48346-25d2-4251-b8f7-2e333cba2144-kube-api-access-27rjp\") pod \"fde48346-25d2-4251-b8f7-2e333cba2144\" (UID: \"fde48346-25d2-4251-b8f7-2e333cba2144\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.307563 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fde48346-25d2-4251-b8f7-2e333cba2144-catalog-content\") pod \"fde48346-25d2-4251-b8f7-2e333cba2144\" (UID: \"fde48346-25d2-4251-b8f7-2e333cba2144\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.308849 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkft8\" (UniqueName: \"kubernetes.io/projected/5d346b77-e45d-4fb8-be3e-9b82fce112eb-kube-api-access-xkft8\") pod \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\" (UID: \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.308900 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fde48346-25d2-4251-b8f7-2e333cba2144-utilities\") pod \"fde48346-25d2-4251-b8f7-2e333cba2144\" (UID: \"fde48346-25d2-4251-b8f7-2e333cba2144\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.308919 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d346b77-e45d-4fb8-be3e-9b82fce112eb-utilities\") pod \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\" (UID: \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.308984 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5807670f-4076-457a-81d3-218df5584035-utilities\") pod \"5807670f-4076-457a-81d3-218df5584035\" (UID: \"5807670f-4076-457a-81d3-218df5584035\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.309713 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5807670f-4076-457a-81d3-218df5584035-utilities" (OuterVolumeSpecName: "utilities") pod "5807670f-4076-457a-81d3-218df5584035" (UID: "5807670f-4076-457a-81d3-218df5584035"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.310203 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fde48346-25d2-4251-b8f7-2e333cba2144-utilities" (OuterVolumeSpecName: "utilities") pod "fde48346-25d2-4251-b8f7-2e333cba2144" (UID: "fde48346-25d2-4251-b8f7-2e333cba2144"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.310976 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d346b77-e45d-4fb8-be3e-9b82fce112eb-utilities" (OuterVolumeSpecName: "utilities") pod "5d346b77-e45d-4fb8-be3e-9b82fce112eb" (UID: "5d346b77-e45d-4fb8-be3e-9b82fce112eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.312603 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fde48346-25d2-4251-b8f7-2e333cba2144-kube-api-access-27rjp" (OuterVolumeSpecName: "kube-api-access-27rjp") pod "fde48346-25d2-4251-b8f7-2e333cba2144" (UID: "fde48346-25d2-4251-b8f7-2e333cba2144"). InnerVolumeSpecName "kube-api-access-27rjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.312655 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d346b77-e45d-4fb8-be3e-9b82fce112eb-kube-api-access-xkft8" (OuterVolumeSpecName: "kube-api-access-xkft8") pod "5d346b77-e45d-4fb8-be3e-9b82fce112eb" (UID: "5d346b77-e45d-4fb8-be3e-9b82fce112eb"). InnerVolumeSpecName "kube-api-access-xkft8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.374341 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fde48346-25d2-4251-b8f7-2e333cba2144-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fde48346-25d2-4251-b8f7-2e333cba2144" (UID: "fde48346-25d2-4251-b8f7-2e333cba2144"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.392653 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6t29q"] Jan 27 19:15:18 crc kubenswrapper[4776]: W0127 19:15:18.398075 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod959cc68c_3afe_4256_843c_c1ddb58c52a7.slice/crio-774cecaa1979f402e2021a16b670301f2140e338e2c833a18188691645f1f44f WatchSource:0}: Error finding container 774cecaa1979f402e2021a16b670301f2140e338e2c833a18188691645f1f44f: Status 404 returned error can't find the container with id 774cecaa1979f402e2021a16b670301f2140e338e2c833a18188691645f1f44f Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.409480 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d346b77-e45d-4fb8-be3e-9b82fce112eb-catalog-content\") pod \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\" (UID: \"5d346b77-e45d-4fb8-be3e-9b82fce112eb\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.409545 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mqdx\" (UniqueName: \"kubernetes.io/projected/5807670f-4076-457a-81d3-218df5584035-kube-api-access-7mqdx\") pod \"5807670f-4076-457a-81d3-218df5584035\" (UID: \"5807670f-4076-457a-81d3-218df5584035\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.409636 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5807670f-4076-457a-81d3-218df5584035-catalog-content\") pod \"5807670f-4076-457a-81d3-218df5584035\" (UID: \"5807670f-4076-457a-81d3-218df5584035\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.410238 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkft8\" (UniqueName: \"kubernetes.io/projected/5d346b77-e45d-4fb8-be3e-9b82fce112eb-kube-api-access-xkft8\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.410506 4776 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fde48346-25d2-4251-b8f7-2e333cba2144-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.410518 4776 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5d346b77-e45d-4fb8-be3e-9b82fce112eb-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.410526 4776 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5807670f-4076-457a-81d3-218df5584035-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.410535 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27rjp\" (UniqueName: \"kubernetes.io/projected/fde48346-25d2-4251-b8f7-2e333cba2144-kube-api-access-27rjp\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.410543 4776 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fde48346-25d2-4251-b8f7-2e333cba2144-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.412417 4776 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5807670f-4076-457a-81d3-218df5584035-kube-api-access-7mqdx" (OuterVolumeSpecName: "kube-api-access-7mqdx") pod "5807670f-4076-457a-81d3-218df5584035" (UID: "5807670f-4076-457a-81d3-218df5584035"). InnerVolumeSpecName "kube-api-access-7mqdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.434295 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d346b77-e45d-4fb8-be3e-9b82fce112eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5d346b77-e45d-4fb8-be3e-9b82fce112eb" (UID: "5d346b77-e45d-4fb8-be3e-9b82fce112eb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.511035 4776 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5d346b77-e45d-4fb8-be3e-9b82fce112eb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.511065 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mqdx\" (UniqueName: \"kubernetes.io/projected/5807670f-4076-457a-81d3-218df5584035-kube-api-access-7mqdx\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.564034 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5807670f-4076-457a-81d3-218df5584035-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5807670f-4076-457a-81d3-218df5584035" (UID: "5807670f-4076-457a-81d3-218df5584035"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.613904 4776 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5807670f-4076-457a-81d3-218df5584035-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.659389 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.723214 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.815501 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-utilities\") pod \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\" (UID: \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.815585 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-catalog-content\") pod \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\" (UID: \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.815633 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtlv7\" (UniqueName: \"kubernetes.io/projected/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-kube-api-access-mtlv7\") pod \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\" (UID: \"5eedb21e-27c3-4ca4-8a86-4ad640a95f82\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.816675 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-utilities" (OuterVolumeSpecName: "utilities") pod "5eedb21e-27c3-4ca4-8a86-4ad640a95f82" (UID: "5eedb21e-27c3-4ca4-8a86-4ad640a95f82"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.820111 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-kube-api-access-mtlv7" (OuterVolumeSpecName: "kube-api-access-mtlv7") pod "5eedb21e-27c3-4ca4-8a86-4ad640a95f82" (UID: "5eedb21e-27c3-4ca4-8a86-4ad640a95f82"). InnerVolumeSpecName "kube-api-access-mtlv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.870736 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5eedb21e-27c3-4ca4-8a86-4ad640a95f82" (UID: "5eedb21e-27c3-4ca4-8a86-4ad640a95f82"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.917056 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6e7febfa-69fa-429e-85be-9d6d54d18760-marketplace-operator-metrics\") pod \"6e7febfa-69fa-429e-85be-9d6d54d18760\" (UID: \"6e7febfa-69fa-429e-85be-9d6d54d18760\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.917415 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6e7febfa-69fa-429e-85be-9d6d54d18760-marketplace-trusted-ca\") pod \"6e7febfa-69fa-429e-85be-9d6d54d18760\" (UID: \"6e7febfa-69fa-429e-85be-9d6d54d18760\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.917529 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmqj6\" (UniqueName: \"kubernetes.io/projected/6e7febfa-69fa-429e-85be-9d6d54d18760-kube-api-access-gmqj6\") pod \"6e7febfa-69fa-429e-85be-9d6d54d18760\" (UID: \"6e7febfa-69fa-429e-85be-9d6d54d18760\") " Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.917743 4776 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-utilities\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.917799 4776 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.917817 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtlv7\" (UniqueName: \"kubernetes.io/projected/5eedb21e-27c3-4ca4-8a86-4ad640a95f82-kube-api-access-mtlv7\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.917979 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e7febfa-69fa-429e-85be-9d6d54d18760-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "6e7febfa-69fa-429e-85be-9d6d54d18760" (UID: "6e7febfa-69fa-429e-85be-9d6d54d18760"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.921181 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e7febfa-69fa-429e-85be-9d6d54d18760-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "6e7febfa-69fa-429e-85be-9d6d54d18760" (UID: "6e7febfa-69fa-429e-85be-9d6d54d18760"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:15:18 crc kubenswrapper[4776]: I0127 19:15:18.922601 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e7febfa-69fa-429e-85be-9d6d54d18760-kube-api-access-gmqj6" (OuterVolumeSpecName: "kube-api-access-gmqj6") pod "6e7febfa-69fa-429e-85be-9d6d54d18760" (UID: "6e7febfa-69fa-429e-85be-9d6d54d18760"). InnerVolumeSpecName "kube-api-access-gmqj6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.018580 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmqj6\" (UniqueName: \"kubernetes.io/projected/6e7febfa-69fa-429e-85be-9d6d54d18760-kube-api-access-gmqj6\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.018620 4776 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6e7febfa-69fa-429e-85be-9d6d54d18760-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.018633 4776 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6e7febfa-69fa-429e-85be-9d6d54d18760-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.120168 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" event={"ID":"6e7febfa-69fa-429e-85be-9d6d54d18760","Type":"ContainerDied","Data":"fb61f7abf430194f3f1783835a05a8ce592b9ea53d53c11886451ccc884a4bb2"} Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.120195 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tqlbs" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.120235 4776 scope.go:117] "RemoveContainer" containerID="b05648f338d409859fa38a44ef5ef2a0697d7c526d9e867b0b25054bbf0e1606" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.122513 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9m7mm" event={"ID":"5eedb21e-27c3-4ca4-8a86-4ad640a95f82","Type":"ContainerDied","Data":"0a7cd1b0e960fbf055cd25f7b5d5a1d12680a3b37ec3ae3a185c3fda7f02a6e5"} Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.122622 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9m7mm" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.127789 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fqbt9" event={"ID":"fde48346-25d2-4251-b8f7-2e333cba2144","Type":"ContainerDied","Data":"6ef4413c92eb68b769e1bccb1032c33b6747fcbd64b2b6328eb2969ca54422cc"} Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.127875 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fqbt9" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.131200 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4q55b" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.131202 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4q55b" event={"ID":"5d346b77-e45d-4fb8-be3e-9b82fce112eb","Type":"ContainerDied","Data":"7bc1ef1e3a199d26d92fd9b9c433e535e9baff86634fac27d41d7ab9d405bbab"} Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.133579 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-88bn9" event={"ID":"5807670f-4076-457a-81d3-218df5584035","Type":"ContainerDied","Data":"6e0e990903fbffaa4267f1fddba17b619b3d2a439d3cb79ea7f144a34fc1cf9e"} Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.133659 4776 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-88bn9" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.136142 4776 scope.go:117] "RemoveContainer" containerID="4c4179d18b4320602e53d654991d24b1e5e1df201239ce72f2c929af07cec93b" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.136621 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" event={"ID":"959cc68c-3afe-4256-843c-c1ddb58c52a7","Type":"ContainerStarted","Data":"15ce8f071e39ce9c4694b95f151d4c6d9771d3e4ade86964cd1107e51378ee4a"} Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.136666 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" event={"ID":"959cc68c-3afe-4256-843c-c1ddb58c52a7","Type":"ContainerStarted","Data":"774cecaa1979f402e2021a16b670301f2140e338e2c833a18188691645f1f44f"} Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.136985 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.141034 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.160190 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-6t29q" podStartSLOduration=2.160166266 podStartE2EDuration="2.160166266s" podCreationTimestamp="2026-01-27 19:15:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:15:19.154180278 +0000 UTC m=+321.659785435" watchObservedRunningTime="2026-01-27 19:15:19.160166266 +0000 UTC m=+321.665771403" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.160945 4776 scope.go:117] "RemoveContainer" containerID="d12b15a36dcd0a0ca46ca2636b4919651751c8c3feddc3cbd7a1147b97259005" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.199028 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9m7mm"] Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.203846 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9m7mm"] Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.210160 4776 scope.go:117] "RemoveContainer" containerID="0691f13cc5821bee0bfc9d3f10db30c078c2b2394cb3172b02e21ab05a325196" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.219596 4776 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tqlbs"] Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.232964 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tqlbs"] Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.234902 4776 scope.go:117] "RemoveContainer" containerID="ed516c774eb69c4922d73c21daad61bc3ad499299f07a118c45ec20a671421c8" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.238452 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-88bn9"] Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.242587 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-88bn9"] Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.248844 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fqbt9"] Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.249905 4776 scope.go:117] "RemoveContainer" containerID="74758b1f431594f3be20c519721f5f00b765774c3c262bbbd4803294cb538917" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.250005 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fqbt9"] Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.252897 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4q55b"] Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.255627 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4q55b"] Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.262756 4776 scope.go:117] "RemoveContainer" containerID="4f918583c7a71365666f6866107bce1d511c7c1f9fb43e0ea18868def4e779d5" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.279826 4776 scope.go:117] "RemoveContainer" containerID="35b4e5bbeea3d5dac551288eb161bcad1154405519d85afaf32dae547a15f52b" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.294482 4776 scope.go:117] "RemoveContainer" containerID="23a304355defc7aa500aeb79ae4dd440776969fa5ce40af099440254919ec2eb" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.306428 4776 scope.go:117] "RemoveContainer" containerID="aa6ace086f65882dd1bcafdb737f91084ff76c6783ee2d979a367ce0c6644796" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.319947 4776 scope.go:117] "RemoveContainer" containerID="6aaf7a2868b904e19ee19f774946bf52671b514d46b61ec02e48556c3f5b0d23" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.332723 4776 scope.go:117] "RemoveContainer" containerID="d0b400833ec38fd654a2ca1e3213fba437f6dce8308e7ff0578af5e2be3f9bbe" Jan 27 19:15:19 crc kubenswrapper[4776]: I0127 19:15:19.346846 4776 scope.go:117] "RemoveContainer" containerID="1a52916c9346a7d8d581b1d3a77b6cc7d29a8861e8968dfa074f59d9d7c861f4" Jan 27 19:15:20 crc kubenswrapper[4776]: I0127 19:15:20.324532 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5807670f-4076-457a-81d3-218df5584035" path="/var/lib/kubelet/pods/5807670f-4076-457a-81d3-218df5584035/volumes" Jan 27 19:15:20 crc kubenswrapper[4776]: I0127 19:15:20.326100 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" path="/var/lib/kubelet/pods/5d346b77-e45d-4fb8-be3e-9b82fce112eb/volumes" Jan 27 19:15:20 crc kubenswrapper[4776]: I0127 19:15:20.326731 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" path="/var/lib/kubelet/pods/5eedb21e-27c3-4ca4-8a86-4ad640a95f82/volumes" Jan 27 19:15:20 crc kubenswrapper[4776]: I0127 19:15:20.328175 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e7febfa-69fa-429e-85be-9d6d54d18760" path="/var/lib/kubelet/pods/6e7febfa-69fa-429e-85be-9d6d54d18760/volumes" Jan 27 19:15:20 crc kubenswrapper[4776]: I0127 19:15:20.328707 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fde48346-25d2-4251-b8f7-2e333cba2144" path="/var/lib/kubelet/pods/fde48346-25d2-4251-b8f7-2e333cba2144/volumes" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412043 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-gh594"] Jan 27 19:15:23 crc kubenswrapper[4776]: E0127 19:15:23.412496 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" containerName="extract-content" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412507 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" containerName="extract-content" Jan 27 19:15:23 crc kubenswrapper[4776]: E0127 19:15:23.412518 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fde48346-25d2-4251-b8f7-2e333cba2144" containerName="extract-utilities" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412524 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="fde48346-25d2-4251-b8f7-2e333cba2144" containerName="extract-utilities" Jan 27 19:15:23 crc kubenswrapper[4776]: E0127 19:15:23.412535 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e7febfa-69fa-429e-85be-9d6d54d18760" containerName="marketplace-operator" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412541 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e7febfa-69fa-429e-85be-9d6d54d18760" containerName="marketplace-operator" Jan 27 19:15:23 crc kubenswrapper[4776]: E0127 19:15:23.412548 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" containerName="extract-utilities" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412553 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" containerName="extract-utilities" Jan 27 19:15:23 crc kubenswrapper[4776]: E0127 19:15:23.412562 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" containerName="extract-utilities" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412567 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" containerName="extract-utilities" Jan 27 19:15:23 crc kubenswrapper[4776]: E0127 19:15:23.412575 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fde48346-25d2-4251-b8f7-2e333cba2144" containerName="registry-server" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412580 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="fde48346-25d2-4251-b8f7-2e333cba2144" containerName="registry-server" Jan 27 19:15:23 crc kubenswrapper[4776]: E0127 19:15:23.412589 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fde48346-25d2-4251-b8f7-2e333cba2144" containerName="extract-content" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412594 4776 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="fde48346-25d2-4251-b8f7-2e333cba2144" containerName="extract-content" Jan 27 19:15:23 crc kubenswrapper[4776]: E0127 19:15:23.412604 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5807670f-4076-457a-81d3-218df5584035" containerName="registry-server" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412609 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="5807670f-4076-457a-81d3-218df5584035" containerName="registry-server" Jan 27 19:15:23 crc kubenswrapper[4776]: E0127 19:15:23.412616 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5807670f-4076-457a-81d3-218df5584035" containerName="extract-utilities" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412624 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="5807670f-4076-457a-81d3-218df5584035" containerName="extract-utilities" Jan 27 19:15:23 crc kubenswrapper[4776]: E0127 19:15:23.412630 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5807670f-4076-457a-81d3-218df5584035" containerName="extract-content" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412635 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="5807670f-4076-457a-81d3-218df5584035" containerName="extract-content" Jan 27 19:15:23 crc kubenswrapper[4776]: E0127 19:15:23.412644 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" containerName="registry-server" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412649 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" containerName="registry-server" Jan 27 19:15:23 crc kubenswrapper[4776]: E0127 19:15:23.412658 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" containerName="registry-server" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412663 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" containerName="registry-server" Jan 27 19:15:23 crc kubenswrapper[4776]: E0127 19:15:23.412670 4776 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" containerName="extract-content" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412676 4776 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" containerName="extract-content" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412750 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="5807670f-4076-457a-81d3-218df5584035" containerName="registry-server" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412760 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d346b77-e45d-4fb8-be3e-9b82fce112eb" containerName="registry-server" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412785 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="fde48346-25d2-4251-b8f7-2e333cba2144" containerName="registry-server" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412795 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e7febfa-69fa-429e-85be-9d6d54d18760" containerName="marketplace-operator" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.412801 4776 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eedb21e-27c3-4ca4-8a86-4ad640a95f82" containerName="registry-server" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.413108 4776 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.426913 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-gh594"] Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.573281 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-trusted-ca\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.573328 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-ca-trust-extracted\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.573357 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-installation-pull-secrets\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.573395 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-registry-certificates\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.573416 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzkfb\" (UniqueName: \"kubernetes.io/projected/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-kube-api-access-qzkfb\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.573456 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.573472 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-bound-sa-token\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.573487 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"registry-tls\" (UniqueName: \"kubernetes.io/projected/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-registry-tls\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.601260 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.674055 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-registry-certificates\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.674104 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzkfb\" (UniqueName: \"kubernetes.io/projected/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-kube-api-access-qzkfb\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.674148 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-bound-sa-token\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.674166 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-registry-tls\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.674189 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-trusted-ca\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.674239 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-ca-trust-extracted\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.674265 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-installation-pull-secrets\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") 
" pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.675461 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-ca-trust-extracted\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.675560 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-trusted-ca\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.675849 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-registry-certificates\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.679619 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-installation-pull-secrets\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.680647 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-registry-tls\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.698159 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-bound-sa-token\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.699410 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzkfb\" (UniqueName: \"kubernetes.io/projected/0d6eefa0-a750-46e3-a45f-a7f9f9bb947d-kube-api-access-qzkfb\") pod \"image-registry-66df7c8f76-gh594\" (UID: \"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d\") " pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:23 crc kubenswrapper[4776]: I0127 19:15:23.728255 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.101620 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-gh594"] Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.176262 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-gh594" event={"ID":"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d","Type":"ContainerStarted","Data":"217b9175d407f938e7aefda96e2b058235e901bfdd4db05f70b54c8165b1c3cd"} Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.569153 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-85nkf"] Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.570411 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.572337 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.580792 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-85nkf"] Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.684907 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83deca64-3398-485d-b9b1-89f499b612d5-utilities\") pod \"redhat-operators-85nkf\" (UID: \"83deca64-3398-485d-b9b1-89f499b612d5\") " pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.684976 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83deca64-3398-485d-b9b1-89f499b612d5-catalog-content\") pod \"redhat-operators-85nkf\" (UID: \"83deca64-3398-485d-b9b1-89f499b612d5\") " pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.685040 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s48w2\" (UniqueName: \"kubernetes.io/projected/83deca64-3398-485d-b9b1-89f499b612d5-kube-api-access-s48w2\") pod \"redhat-operators-85nkf\" (UID: \"83deca64-3398-485d-b9b1-89f499b612d5\") " pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.775333 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tx99t"] Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.776515 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.780096 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.788432 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tx99t"] Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.789035 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s48w2\" (UniqueName: \"kubernetes.io/projected/83deca64-3398-485d-b9b1-89f499b612d5-kube-api-access-s48w2\") pod \"redhat-operators-85nkf\" (UID: \"83deca64-3398-485d-b9b1-89f499b612d5\") " pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.789125 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83deca64-3398-485d-b9b1-89f499b612d5-utilities\") pod \"redhat-operators-85nkf\" (UID: \"83deca64-3398-485d-b9b1-89f499b612d5\") " pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.789154 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83deca64-3398-485d-b9b1-89f499b612d5-catalog-content\") pod \"redhat-operators-85nkf\" (UID: \"83deca64-3398-485d-b9b1-89f499b612d5\") " pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.789514 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83deca64-3398-485d-b9b1-89f499b612d5-utilities\") pod \"redhat-operators-85nkf\" (UID: \"83deca64-3398-485d-b9b1-89f499b612d5\") " pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.789653 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83deca64-3398-485d-b9b1-89f499b612d5-catalog-content\") pod \"redhat-operators-85nkf\" (UID: \"83deca64-3398-485d-b9b1-89f499b612d5\") " pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.816675 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s48w2\" (UniqueName: \"kubernetes.io/projected/83deca64-3398-485d-b9b1-89f499b612d5-kube-api-access-s48w2\") pod \"redhat-operators-85nkf\" (UID: \"83deca64-3398-485d-b9b1-89f499b612d5\") " pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.886146 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.890472 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcvp7\" (UniqueName: \"kubernetes.io/projected/c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac-kube-api-access-hcvp7\") pod \"redhat-marketplace-tx99t\" (UID: \"c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac\") " pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.890515 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac-utilities\") pod \"redhat-marketplace-tx99t\" (UID: \"c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac\") " pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.890785 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac-catalog-content\") pod \"redhat-marketplace-tx99t\" (UID: \"c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac\") " pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.991846 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcvp7\" (UniqueName: \"kubernetes.io/projected/c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac-kube-api-access-hcvp7\") pod \"redhat-marketplace-tx99t\" (UID: \"c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac\") " pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.992177 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac-utilities\") pod \"redhat-marketplace-tx99t\" (UID: \"c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac\") " pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.992205 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac-catalog-content\") pod \"redhat-marketplace-tx99t\" (UID: \"c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac\") " pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.992671 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac-utilities\") pod \"redhat-marketplace-tx99t\" (UID: \"c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac\") " pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:24 crc kubenswrapper[4776]: I0127 19:15:24.992720 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac-catalog-content\") pod \"redhat-marketplace-tx99t\" (UID: \"c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac\") " pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:25 crc kubenswrapper[4776]: I0127 19:15:25.010818 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcvp7\" (UniqueName: \"kubernetes.io/projected/c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac-kube-api-access-hcvp7\") pod 
\"redhat-marketplace-tx99t\" (UID: \"c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac\") " pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:25 crc kubenswrapper[4776]: I0127 19:15:25.095798 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:25 crc kubenswrapper[4776]: I0127 19:15:25.183244 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-gh594" event={"ID":"0d6eefa0-a750-46e3-a45f-a7f9f9bb947d","Type":"ContainerStarted","Data":"05e23b4b13de6041068c70b6f808cb52f07c5c602cf9c376f3e74a1a371928f2"} Jan 27 19:15:25 crc kubenswrapper[4776]: I0127 19:15:25.184999 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:25 crc kubenswrapper[4776]: I0127 19:15:25.207740 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-gh594" podStartSLOduration=2.20771547 podStartE2EDuration="2.20771547s" podCreationTimestamp="2026-01-27 19:15:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-27 19:15:25.19891531 +0000 UTC m=+327.704520457" watchObservedRunningTime="2026-01-27 19:15:25.20771547 +0000 UTC m=+327.713320627" Jan 27 19:15:25 crc kubenswrapper[4776]: I0127 19:15:25.283108 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-85nkf"] Jan 27 19:15:25 crc kubenswrapper[4776]: W0127 19:15:25.289879 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod83deca64_3398_485d_b9b1_89f499b612d5.slice/crio-26eb080f2738a7471b125d3c1b81e3a7002ee09868914f3ff4f04353fc21e674 WatchSource:0}: Error finding container 26eb080f2738a7471b125d3c1b81e3a7002ee09868914f3ff4f04353fc21e674: Status 404 returned error can't find the container with id 26eb080f2738a7471b125d3c1b81e3a7002ee09868914f3ff4f04353fc21e674 Jan 27 19:15:25 crc kubenswrapper[4776]: I0127 19:15:25.473625 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tx99t"] Jan 27 19:15:25 crc kubenswrapper[4776]: W0127 19:15:25.480940 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4f0eea7_9b6d_4bff_9b37_66b4b976d8ac.slice/crio-e5e5d7a0b502eec941391c86adf942e0f5a34fff5d3da647161fa554c0143859 WatchSource:0}: Error finding container e5e5d7a0b502eec941391c86adf942e0f5a34fff5d3da647161fa554c0143859: Status 404 returned error can't find the container with id e5e5d7a0b502eec941391c86adf942e0f5a34fff5d3da647161fa554c0143859 Jan 27 19:15:26 crc kubenswrapper[4776]: I0127 19:15:26.190427 4776 generic.go:334] "Generic (PLEG): container finished" podID="83deca64-3398-485d-b9b1-89f499b612d5" containerID="f22fc6f1ead6b1b8371e3a343d2bc563ff62de6c7f1fc379928ff001da996234" exitCode=0 Jan 27 19:15:26 crc kubenswrapper[4776]: I0127 19:15:26.190575 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-85nkf" event={"ID":"83deca64-3398-485d-b9b1-89f499b612d5","Type":"ContainerDied","Data":"f22fc6f1ead6b1b8371e3a343d2bc563ff62de6c7f1fc379928ff001da996234"} Jan 27 19:15:26 crc kubenswrapper[4776]: I0127 19:15:26.190747 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-85nkf" event={"ID":"83deca64-3398-485d-b9b1-89f499b612d5","Type":"ContainerStarted","Data":"26eb080f2738a7471b125d3c1b81e3a7002ee09868914f3ff4f04353fc21e674"} Jan 27 19:15:26 crc kubenswrapper[4776]: I0127 19:15:26.193001 4776 generic.go:334] "Generic (PLEG): container finished" podID="c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac" containerID="aaf39fb4a575799003a2756be357c868a1d6a9dcc1f524d6043b08b077230a6f" exitCode=0 Jan 27 19:15:26 crc kubenswrapper[4776]: I0127 19:15:26.193227 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tx99t" event={"ID":"c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac","Type":"ContainerDied","Data":"aaf39fb4a575799003a2756be357c868a1d6a9dcc1f524d6043b08b077230a6f"} Jan 27 19:15:26 crc kubenswrapper[4776]: I0127 19:15:26.193288 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tx99t" event={"ID":"c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac","Type":"ContainerStarted","Data":"e5e5d7a0b502eec941391c86adf942e0f5a34fff5d3da647161fa554c0143859"} Jan 27 19:15:26 crc kubenswrapper[4776]: I0127 19:15:26.974811 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9hx29"] Jan 27 19:15:26 crc kubenswrapper[4776]: I0127 19:15:26.976421 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:26 crc kubenswrapper[4776]: I0127 19:15:26.978585 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 27 19:15:26 crc kubenswrapper[4776]: I0127 19:15:26.992199 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9hx29"] Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.130020 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f7ba033-057a-485f-ac38-879779c30537-utilities\") pod \"community-operators-9hx29\" (UID: \"5f7ba033-057a-485f-ac38-879779c30537\") " pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.130067 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f7ba033-057a-485f-ac38-879779c30537-catalog-content\") pod \"community-operators-9hx29\" (UID: \"5f7ba033-057a-485f-ac38-879779c30537\") " pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.130092 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s87p6\" (UniqueName: \"kubernetes.io/projected/5f7ba033-057a-485f-ac38-879779c30537-kube-api-access-s87p6\") pod \"community-operators-9hx29\" (UID: \"5f7ba033-057a-485f-ac38-879779c30537\") " pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.170149 4776 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-swmpc"] Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.171139 4776 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.173006 4776 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.189288 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-swmpc"] Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.199954 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-85nkf" event={"ID":"83deca64-3398-485d-b9b1-89f499b612d5","Type":"ContainerStarted","Data":"969fd63a7648c45282958961e2befb75e18b05cd1874e000f43d54f74f85aef0"} Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.205903 4776 generic.go:334] "Generic (PLEG): container finished" podID="c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac" containerID="8fa16d2d7ec4d06f167d57b1a02223b254f67bdbaa99067deff5b19186338bae" exitCode=0 Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.207137 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tx99t" event={"ID":"c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac","Type":"ContainerDied","Data":"8fa16d2d7ec4d06f167d57b1a02223b254f67bdbaa99067deff5b19186338bae"} Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.231083 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f7ba033-057a-485f-ac38-879779c30537-utilities\") pod \"community-operators-9hx29\" (UID: \"5f7ba033-057a-485f-ac38-879779c30537\") " pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.231469 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f7ba033-057a-485f-ac38-879779c30537-catalog-content\") pod \"community-operators-9hx29\" (UID: \"5f7ba033-057a-485f-ac38-879779c30537\") " pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.231508 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s87p6\" (UniqueName: \"kubernetes.io/projected/5f7ba033-057a-485f-ac38-879779c30537-kube-api-access-s87p6\") pod \"community-operators-9hx29\" (UID: \"5f7ba033-057a-485f-ac38-879779c30537\") " pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.232098 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f7ba033-057a-485f-ac38-879779c30537-catalog-content\") pod \"community-operators-9hx29\" (UID: \"5f7ba033-057a-485f-ac38-879779c30537\") " pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.232300 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f7ba033-057a-485f-ac38-879779c30537-utilities\") pod \"community-operators-9hx29\" (UID: \"5f7ba033-057a-485f-ac38-879779c30537\") " pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.253674 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s87p6\" (UniqueName: 
\"kubernetes.io/projected/5f7ba033-057a-485f-ac38-879779c30537-kube-api-access-s87p6\") pod \"community-operators-9hx29\" (UID: \"5f7ba033-057a-485f-ac38-879779c30537\") " pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.333043 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af9a6d64-d035-4f4f-af19-64433a66cfcb-catalog-content\") pod \"certified-operators-swmpc\" (UID: \"af9a6d64-d035-4f4f-af19-64433a66cfcb\") " pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.333191 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af9a6d64-d035-4f4f-af19-64433a66cfcb-utilities\") pod \"certified-operators-swmpc\" (UID: \"af9a6d64-d035-4f4f-af19-64433a66cfcb\") " pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.333255 4776 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gph8\" (UniqueName: \"kubernetes.io/projected/af9a6d64-d035-4f4f-af19-64433a66cfcb-kube-api-access-5gph8\") pod \"certified-operators-swmpc\" (UID: \"af9a6d64-d035-4f4f-af19-64433a66cfcb\") " pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.336677 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.434801 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af9a6d64-d035-4f4f-af19-64433a66cfcb-utilities\") pod \"certified-operators-swmpc\" (UID: \"af9a6d64-d035-4f4f-af19-64433a66cfcb\") " pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.434873 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gph8\" (UniqueName: \"kubernetes.io/projected/af9a6d64-d035-4f4f-af19-64433a66cfcb-kube-api-access-5gph8\") pod \"certified-operators-swmpc\" (UID: \"af9a6d64-d035-4f4f-af19-64433a66cfcb\") " pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.434917 4776 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af9a6d64-d035-4f4f-af19-64433a66cfcb-catalog-content\") pod \"certified-operators-swmpc\" (UID: \"af9a6d64-d035-4f4f-af19-64433a66cfcb\") " pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.435821 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af9a6d64-d035-4f4f-af19-64433a66cfcb-catalog-content\") pod \"certified-operators-swmpc\" (UID: \"af9a6d64-d035-4f4f-af19-64433a66cfcb\") " pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.436091 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af9a6d64-d035-4f4f-af19-64433a66cfcb-utilities\") pod \"certified-operators-swmpc\" (UID: 
\"af9a6d64-d035-4f4f-af19-64433a66cfcb\") " pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.456027 4776 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gph8\" (UniqueName: \"kubernetes.io/projected/af9a6d64-d035-4f4f-af19-64433a66cfcb-kube-api-access-5gph8\") pod \"certified-operators-swmpc\" (UID: \"af9a6d64-d035-4f4f-af19-64433a66cfcb\") " pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.488377 4776 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.761799 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9hx29"] Jan 27 19:15:27 crc kubenswrapper[4776]: W0127 19:15:27.765540 4776 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f7ba033_057a_485f_ac38_879779c30537.slice/crio-44652973370e7f538b4691a0fdfca8917d55d0a5b1047c248126fb635611efc6 WatchSource:0}: Error finding container 44652973370e7f538b4691a0fdfca8917d55d0a5b1047c248126fb635611efc6: Status 404 returned error can't find the container with id 44652973370e7f538b4691a0fdfca8917d55d0a5b1047c248126fb635611efc6 Jan 27 19:15:27 crc kubenswrapper[4776]: I0127 19:15:27.928969 4776 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-swmpc"] Jan 27 19:15:28 crc kubenswrapper[4776]: I0127 19:15:28.211170 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-swmpc" event={"ID":"af9a6d64-d035-4f4f-af19-64433a66cfcb","Type":"ContainerStarted","Data":"a3d16558ac1c52a43b0e8616a607e3be7988ea80bc48377790fd72e0c273848b"} Jan 27 19:15:28 crc kubenswrapper[4776]: I0127 19:15:28.212163 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9hx29" event={"ID":"5f7ba033-057a-485f-ac38-879779c30537","Type":"ContainerStarted","Data":"44652973370e7f538b4691a0fdfca8917d55d0a5b1047c248126fb635611efc6"} Jan 27 19:15:28 crc kubenswrapper[4776]: I0127 19:15:28.213882 4776 generic.go:334] "Generic (PLEG): container finished" podID="83deca64-3398-485d-b9b1-89f499b612d5" containerID="969fd63a7648c45282958961e2befb75e18b05cd1874e000f43d54f74f85aef0" exitCode=0 Jan 27 19:15:28 crc kubenswrapper[4776]: I0127 19:15:28.213926 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-85nkf" event={"ID":"83deca64-3398-485d-b9b1-89f499b612d5","Type":"ContainerDied","Data":"969fd63a7648c45282958961e2befb75e18b05cd1874e000f43d54f74f85aef0"} Jan 27 19:15:29 crc kubenswrapper[4776]: I0127 19:15:29.223019 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tx99t" event={"ID":"c4f0eea7-9b6d-4bff-9b37-66b4b976d8ac","Type":"ContainerStarted","Data":"aa6d81bc94e63e5734e11c9eb97574f1e601da554942b7f5de5f6ab0d666522a"} Jan 27 19:15:29 crc kubenswrapper[4776]: I0127 19:15:29.225102 4776 generic.go:334] "Generic (PLEG): container finished" podID="af9a6d64-d035-4f4f-af19-64433a66cfcb" containerID="e2afdfd741900382273e03bf4f7a3ddfaf0feed119ecf1e0e5167ab678a5e891" exitCode=0 Jan 27 19:15:29 crc kubenswrapper[4776]: I0127 19:15:29.225218 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-swmpc" 
event={"ID":"af9a6d64-d035-4f4f-af19-64433a66cfcb","Type":"ContainerDied","Data":"e2afdfd741900382273e03bf4f7a3ddfaf0feed119ecf1e0e5167ab678a5e891"} Jan 27 19:15:29 crc kubenswrapper[4776]: I0127 19:15:29.227895 4776 generic.go:334] "Generic (PLEG): container finished" podID="5f7ba033-057a-485f-ac38-879779c30537" containerID="0ab8376f5836ac601170dfff8c7be799ee24212061dbd9348508d6102c466035" exitCode=0 Jan 27 19:15:29 crc kubenswrapper[4776]: I0127 19:15:29.227956 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9hx29" event={"ID":"5f7ba033-057a-485f-ac38-879779c30537","Type":"ContainerDied","Data":"0ab8376f5836ac601170dfff8c7be799ee24212061dbd9348508d6102c466035"} Jan 27 19:15:29 crc kubenswrapper[4776]: I0127 19:15:29.236949 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-85nkf" event={"ID":"83deca64-3398-485d-b9b1-89f499b612d5","Type":"ContainerStarted","Data":"c4e7bb4097d50768fe44dc284690dfb98375be44f9b826a514361b4bc2d98187"} Jan 27 19:15:29 crc kubenswrapper[4776]: I0127 19:15:29.252247 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tx99t" podStartSLOduration=3.771968588 podStartE2EDuration="5.252216955s" podCreationTimestamp="2026-01-27 19:15:24 +0000 UTC" firstStartedPulling="2026-01-27 19:15:26.194931492 +0000 UTC m=+328.700536679" lastFinishedPulling="2026-01-27 19:15:27.675179899 +0000 UTC m=+330.180785046" observedRunningTime="2026-01-27 19:15:29.24985649 +0000 UTC m=+331.755461647" watchObservedRunningTime="2026-01-27 19:15:29.252216955 +0000 UTC m=+331.757822102" Jan 27 19:15:29 crc kubenswrapper[4776]: I0127 19:15:29.300711 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-85nkf" podStartSLOduration=2.899225061 podStartE2EDuration="5.300690078s" podCreationTimestamp="2026-01-27 19:15:24 +0000 UTC" firstStartedPulling="2026-01-27 19:15:26.193079554 +0000 UTC m=+328.698684711" lastFinishedPulling="2026-01-27 19:15:28.594544581 +0000 UTC m=+331.100149728" observedRunningTime="2026-01-27 19:15:29.294846412 +0000 UTC m=+331.800451559" watchObservedRunningTime="2026-01-27 19:15:29.300690078 +0000 UTC m=+331.806295235" Jan 27 19:15:30 crc kubenswrapper[4776]: I0127 19:15:30.243111 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-swmpc" event={"ID":"af9a6d64-d035-4f4f-af19-64433a66cfcb","Type":"ContainerStarted","Data":"17eff4c51d96b2d394e69c65dafe4b9710da659379949b8d38834a460ef0fe14"} Jan 27 19:15:30 crc kubenswrapper[4776]: I0127 19:15:30.255942 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9hx29" event={"ID":"5f7ba033-057a-485f-ac38-879779c30537","Type":"ContainerStarted","Data":"b7793ff18fe3ddeeec6ca0ae5c92a89d490e2d61515875f51ca71bd062d9d30a"} Jan 27 19:15:31 crc kubenswrapper[4776]: I0127 19:15:31.253423 4776 generic.go:334] "Generic (PLEG): container finished" podID="af9a6d64-d035-4f4f-af19-64433a66cfcb" containerID="17eff4c51d96b2d394e69c65dafe4b9710da659379949b8d38834a460ef0fe14" exitCode=0 Jan 27 19:15:31 crc kubenswrapper[4776]: I0127 19:15:31.253473 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-swmpc" event={"ID":"af9a6d64-d035-4f4f-af19-64433a66cfcb","Type":"ContainerDied","Data":"17eff4c51d96b2d394e69c65dafe4b9710da659379949b8d38834a460ef0fe14"} Jan 27 19:15:31 crc 
kubenswrapper[4776]: I0127 19:15:31.257213 4776 generic.go:334] "Generic (PLEG): container finished" podID="5f7ba033-057a-485f-ac38-879779c30537" containerID="b7793ff18fe3ddeeec6ca0ae5c92a89d490e2d61515875f51ca71bd062d9d30a" exitCode=0 Jan 27 19:15:31 crc kubenswrapper[4776]: I0127 19:15:31.257243 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9hx29" event={"ID":"5f7ba033-057a-485f-ac38-879779c30537","Type":"ContainerDied","Data":"b7793ff18fe3ddeeec6ca0ae5c92a89d490e2d61515875f51ca71bd062d9d30a"} Jan 27 19:15:32 crc kubenswrapper[4776]: I0127 19:15:32.275115 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-swmpc" event={"ID":"af9a6d64-d035-4f4f-af19-64433a66cfcb","Type":"ContainerStarted","Data":"74ef3bf1479fbfe6e2e34bf29cb1ae9bb220239818a224c239634d66cafd9966"} Jan 27 19:15:32 crc kubenswrapper[4776]: I0127 19:15:32.278843 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9hx29" event={"ID":"5f7ba033-057a-485f-ac38-879779c30537","Type":"ContainerStarted","Data":"88161601f98528256843bc54ad7b31f025274ba5a1055894abcbc9db99fad31d"} Jan 27 19:15:32 crc kubenswrapper[4776]: I0127 19:15:32.317044 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9hx29" podStartSLOduration=3.892600749 podStartE2EDuration="6.317019117s" podCreationTimestamp="2026-01-27 19:15:26 +0000 UTC" firstStartedPulling="2026-01-27 19:15:29.231375562 +0000 UTC m=+331.736980709" lastFinishedPulling="2026-01-27 19:15:31.65579393 +0000 UTC m=+334.161399077" observedRunningTime="2026-01-27 19:15:32.315856839 +0000 UTC m=+334.821461996" watchObservedRunningTime="2026-01-27 19:15:32.317019117 +0000 UTC m=+334.822624284" Jan 27 19:15:32 crc kubenswrapper[4776]: I0127 19:15:32.317502 4776 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-swmpc" podStartSLOduration=2.8202329539999997 podStartE2EDuration="5.317497431s" podCreationTimestamp="2026-01-27 19:15:27 +0000 UTC" firstStartedPulling="2026-01-27 19:15:29.226258879 +0000 UTC m=+331.731864026" lastFinishedPulling="2026-01-27 19:15:31.723523356 +0000 UTC m=+334.229128503" observedRunningTime="2026-01-27 19:15:32.29514136 +0000 UTC m=+334.800746507" watchObservedRunningTime="2026-01-27 19:15:32.317497431 +0000 UTC m=+334.823102588" Jan 27 19:15:34 crc kubenswrapper[4776]: I0127 19:15:34.886629 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:34 crc kubenswrapper[4776]: I0127 19:15:34.887003 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:34 crc kubenswrapper[4776]: I0127 19:15:34.953803 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:35 crc kubenswrapper[4776]: I0127 19:15:35.096813 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:35 crc kubenswrapper[4776]: I0127 19:15:35.096874 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:35 crc kubenswrapper[4776]: I0127 19:15:35.143082 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:35 crc kubenswrapper[4776]: I0127 19:15:35.335713 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tx99t" Jan 27 19:15:35 crc kubenswrapper[4776]: I0127 19:15:35.371871 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-85nkf" Jan 27 19:15:37 crc kubenswrapper[4776]: I0127 19:15:37.337795 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:37 crc kubenswrapper[4776]: I0127 19:15:37.337839 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:37 crc kubenswrapper[4776]: I0127 19:15:37.396798 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:37 crc kubenswrapper[4776]: I0127 19:15:37.489039 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:37 crc kubenswrapper[4776]: I0127 19:15:37.489081 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:37 crc kubenswrapper[4776]: I0127 19:15:37.529082 4776 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:38 crc kubenswrapper[4776]: I0127 19:15:38.353238 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9hx29" Jan 27 19:15:38 crc kubenswrapper[4776]: I0127 19:15:38.370206 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-swmpc" Jan 27 19:15:43 crc kubenswrapper[4776]: I0127 19:15:43.739366 4776 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-gh594" Jan 27 19:15:43 crc kubenswrapper[4776]: I0127 19:15:43.826558 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zh827"] Jan 27 19:16:08 crc kubenswrapper[4776]: I0127 19:16:08.299838 4776 patch_prober.go:28] interesting pod/machine-config-daemon-z2w9t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 19:16:08 crc kubenswrapper[4776]: I0127 19:16:08.300306 4776 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" podUID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 19:16:08 crc kubenswrapper[4776]: I0127 19:16:08.868874 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-zh827" podUID="ecca5f11-1043-4e40-b554-8a63ecc36423" containerName="registry" containerID="cri-o://272881345a7eaeb91db523807072106aae15caedede48d1b3fd0ecd9781cc75d" gracePeriod=30 Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.207563 4776 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.336130 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ecca5f11-1043-4e40-b554-8a63ecc36423-installation-pull-secrets\") pod \"ecca5f11-1043-4e40-b554-8a63ecc36423\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.336163 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ecca5f11-1043-4e40-b554-8a63ecc36423-ca-trust-extracted\") pod \"ecca5f11-1043-4e40-b554-8a63ecc36423\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.336210 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv2kp\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-kube-api-access-rv2kp\") pod \"ecca5f11-1043-4e40-b554-8a63ecc36423\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.336248 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ecca5f11-1043-4e40-b554-8a63ecc36423-registry-certificates\") pod \"ecca5f11-1043-4e40-b554-8a63ecc36423\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.336393 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"ecca5f11-1043-4e40-b554-8a63ecc36423\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.336432 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ecca5f11-1043-4e40-b554-8a63ecc36423-trusted-ca\") pod \"ecca5f11-1043-4e40-b554-8a63ecc36423\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.336472 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-bound-sa-token\") pod \"ecca5f11-1043-4e40-b554-8a63ecc36423\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.336496 4776 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-registry-tls\") pod \"ecca5f11-1043-4e40-b554-8a63ecc36423\" (UID: \"ecca5f11-1043-4e40-b554-8a63ecc36423\") " Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.336995 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecca5f11-1043-4e40-b554-8a63ecc36423-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "ecca5f11-1043-4e40-b554-8a63ecc36423" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.337048 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ecca5f11-1043-4e40-b554-8a63ecc36423-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "ecca5f11-1043-4e40-b554-8a63ecc36423" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.341731 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "ecca5f11-1043-4e40-b554-8a63ecc36423" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.342034 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-kube-api-access-rv2kp" (OuterVolumeSpecName: "kube-api-access-rv2kp") pod "ecca5f11-1043-4e40-b554-8a63ecc36423" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423"). InnerVolumeSpecName "kube-api-access-rv2kp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.342375 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecca5f11-1043-4e40-b554-8a63ecc36423-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "ecca5f11-1043-4e40-b554-8a63ecc36423" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.342419 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "ecca5f11-1043-4e40-b554-8a63ecc36423" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.353361 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "ecca5f11-1043-4e40-b554-8a63ecc36423" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.360955 4776 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecca5f11-1043-4e40-b554-8a63ecc36423-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "ecca5f11-1043-4e40-b554-8a63ecc36423" (UID: "ecca5f11-1043-4e40-b554-8a63ecc36423"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.438180 4776 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ecca5f11-1043-4e40-b554-8a63ecc36423-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.438211 4776 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.438220 4776 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.438229 4776 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ecca5f11-1043-4e40-b554-8a63ecc36423-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.438239 4776 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ecca5f11-1043-4e40-b554-8a63ecc36423-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.438247 4776 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv2kp\" (UniqueName: \"kubernetes.io/projected/ecca5f11-1043-4e40-b554-8a63ecc36423-kube-api-access-rv2kp\") on node \"crc\" DevicePath \"\"" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.438255 4776 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ecca5f11-1043-4e40-b554-8a63ecc36423-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.516614 4776 generic.go:334] "Generic (PLEG): container finished" podID="ecca5f11-1043-4e40-b554-8a63ecc36423" containerID="272881345a7eaeb91db523807072106aae15caedede48d1b3fd0ecd9781cc75d" exitCode=0 Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.516655 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zh827" event={"ID":"ecca5f11-1043-4e40-b554-8a63ecc36423","Type":"ContainerDied","Data":"272881345a7eaeb91db523807072106aae15caedede48d1b3fd0ecd9781cc75d"} Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.516684 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zh827" event={"ID":"ecca5f11-1043-4e40-b554-8a63ecc36423","Type":"ContainerDied","Data":"54d57f282b95456b13c7af5486ab36331a80cdeed5bcd511db9c3fe669eaa9ba"} Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.516700 4776 scope.go:117] "RemoveContainer" containerID="272881345a7eaeb91db523807072106aae15caedede48d1b3fd0ecd9781cc75d" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.516727 4776 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zh827" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.534681 4776 scope.go:117] "RemoveContainer" containerID="272881345a7eaeb91db523807072106aae15caedede48d1b3fd0ecd9781cc75d" Jan 27 19:16:09 crc kubenswrapper[4776]: E0127 19:16:09.535129 4776 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"272881345a7eaeb91db523807072106aae15caedede48d1b3fd0ecd9781cc75d\": container with ID starting with 272881345a7eaeb91db523807072106aae15caedede48d1b3fd0ecd9781cc75d not found: ID does not exist" containerID="272881345a7eaeb91db523807072106aae15caedede48d1b3fd0ecd9781cc75d" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.535164 4776 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"272881345a7eaeb91db523807072106aae15caedede48d1b3fd0ecd9781cc75d"} err="failed to get container status \"272881345a7eaeb91db523807072106aae15caedede48d1b3fd0ecd9781cc75d\": rpc error: code = NotFound desc = could not find container \"272881345a7eaeb91db523807072106aae15caedede48d1b3fd0ecd9781cc75d\": container with ID starting with 272881345a7eaeb91db523807072106aae15caedede48d1b3fd0ecd9781cc75d not found: ID does not exist" Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.555761 4776 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zh827"] Jan 27 19:16:09 crc kubenswrapper[4776]: I0127 19:16:09.560587 4776 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zh827"] Jan 27 19:16:10 crc kubenswrapper[4776]: I0127 19:16:10.326754 4776 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecca5f11-1043-4e40-b554-8a63ecc36423" path="/var/lib/kubelet/pods/ecca5f11-1043-4e40-b554-8a63ecc36423/volumes" Jan 27 19:16:38 crc kubenswrapper[4776]: I0127 19:16:38.300131 4776 patch_prober.go:28] interesting pod/machine-config-daemon-z2w9t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 19:16:38 crc kubenswrapper[4776]: I0127 19:16:38.300815 4776 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" podUID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 19:17:08 crc kubenswrapper[4776]: I0127 19:17:08.299596 4776 patch_prober.go:28] interesting pod/machine-config-daemon-z2w9t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 19:17:08 crc kubenswrapper[4776]: I0127 19:17:08.300308 4776 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" podUID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 19:17:08 crc kubenswrapper[4776]: I0127 19:17:08.300382 4776 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:17:08 crc kubenswrapper[4776]: I0127 19:17:08.301401 4776 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bc6ea7145472e9225fad2ed471d64235159ab5f972af88459dac501e9bf5feab"} pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 27 19:17:08 crc kubenswrapper[4776]: I0127 19:17:08.301527 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" podUID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerName="machine-config-daemon" containerID="cri-o://bc6ea7145472e9225fad2ed471d64235159ab5f972af88459dac501e9bf5feab" gracePeriod=600 Jan 27 19:17:08 crc kubenswrapper[4776]: I0127 19:17:08.889862 4776 generic.go:334] "Generic (PLEG): container finished" podID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerID="bc6ea7145472e9225fad2ed471d64235159ab5f972af88459dac501e9bf5feab" exitCode=0 Jan 27 19:17:08 crc kubenswrapper[4776]: I0127 19:17:08.889973 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" event={"ID":"7ebf810d-762c-445f-9705-a74d274fcf1b","Type":"ContainerDied","Data":"bc6ea7145472e9225fad2ed471d64235159ab5f972af88459dac501e9bf5feab"} Jan 27 19:17:08 crc kubenswrapper[4776]: I0127 19:17:08.890371 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" event={"ID":"7ebf810d-762c-445f-9705-a74d274fcf1b","Type":"ContainerStarted","Data":"24b4e535a39649df429c98dfb56e1e8c5dfa520f8a61d371dbb0a58851bcca10"} Jan 27 19:17:08 crc kubenswrapper[4776]: I0127 19:17:08.890416 4776 scope.go:117] "RemoveContainer" containerID="4d45f7d26cb7e4a4cd483e37047ef4dbdb13f3db96c1474df908d29248496bde" Jan 27 19:19:08 crc kubenswrapper[4776]: I0127 19:19:08.305474 4776 patch_prober.go:28] interesting pod/machine-config-daemon-z2w9t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 19:19:08 crc kubenswrapper[4776]: I0127 19:19:08.307126 4776 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" podUID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 19:19:38 crc kubenswrapper[4776]: I0127 19:19:38.298905 4776 patch_prober.go:28] interesting pod/machine-config-daemon-z2w9t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 19:19:38 crc kubenswrapper[4776]: I0127 19:19:38.299491 4776 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" podUID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 19:20:08 crc 
kubenswrapper[4776]: I0127 19:20:08.300052 4776 patch_prober.go:28] interesting pod/machine-config-daemon-z2w9t container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 27 19:20:08 crc kubenswrapper[4776]: I0127 19:20:08.300760 4776 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" podUID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 27 19:20:08 crc kubenswrapper[4776]: I0127 19:20:08.300909 4776 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" Jan 27 19:20:08 crc kubenswrapper[4776]: I0127 19:20:08.301920 4776 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"24b4e535a39649df429c98dfb56e1e8c5dfa520f8a61d371dbb0a58851bcca10"} pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 27 19:20:08 crc kubenswrapper[4776]: I0127 19:20:08.302030 4776 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" podUID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerName="machine-config-daemon" containerID="cri-o://24b4e535a39649df429c98dfb56e1e8c5dfa520f8a61d371dbb0a58851bcca10" gracePeriod=600 Jan 27 19:20:09 crc kubenswrapper[4776]: I0127 19:20:09.434397 4776 generic.go:334] "Generic (PLEG): container finished" podID="7ebf810d-762c-445f-9705-a74d274fcf1b" containerID="24b4e535a39649df429c98dfb56e1e8c5dfa520f8a61d371dbb0a58851bcca10" exitCode=0 Jan 27 19:20:09 crc kubenswrapper[4776]: I0127 19:20:09.434496 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" event={"ID":"7ebf810d-762c-445f-9705-a74d274fcf1b","Type":"ContainerDied","Data":"24b4e535a39649df429c98dfb56e1e8c5dfa520f8a61d371dbb0a58851bcca10"} Jan 27 19:20:09 crc kubenswrapper[4776]: I0127 19:20:09.435045 4776 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z2w9t" event={"ID":"7ebf810d-762c-445f-9705-a74d274fcf1b","Type":"ContainerStarted","Data":"7d38fc905a1964544535e4a5f456b12421d69075a2040983331b781c07e059ff"} Jan 27 19:20:09 crc kubenswrapper[4776]: I0127 19:20:09.435072 4776 scope.go:117] "RemoveContainer" containerID="bc6ea7145472e9225fad2ed471d64235159ab5f972af88459dac501e9bf5feab" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515136210137024444 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015136210140017353 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015136206324016507 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015136206324015457 5ustar corecore